reformat hivesterix code
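
Whitespace-only change: convert tab indentation to 4-space indentation and re-wrap long statements to the new line-length settings. No functional change is intended.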

git-svn-id: https://hyracks.googlecode.com/svn/branches/fullstack_release_cleanup@3069 123451ca-8445-de46-9d55-352943316053
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
index 3c84566..8fb715b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
@@ -4,22 +4,21 @@
  * some constants for expression
  * 
  * @author yingyib
- * 
  */
 public class ExpressionConstant {
 
-	/**
-	 * name space for function identifier
-	 */
-	public static String NAMESPACE = "hive";
+    /**
+     * name space for function identifier
+     */
+    public static String NAMESPACE = "hive";
 
-	/**
-	 * field expression: modeled as function in Algebricks
-	 */
-	public static String FIELDACCESS = "fieldaccess";
+    /**
+     * field expression: modeled as function in Algebricks
+     */
+    public static String FIELDACCESS = "fieldaccess";
 
-	/**
-	 * null string: modeled as null in Algebricks
-	 */
-	public static String NULL = "null";
+    /**
+     * null string: modeled as null in Algebricks
+     */
+    public static String NULL = "null";
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
index 18380f7..56890eb 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
@@ -9,75 +9,74 @@
 
 public class HiveAlgebricksBuiltInFunctionMap {
 
-	/**
-	 * hive auqa builtin function map instance
-	 */
-	public static HiveAlgebricksBuiltInFunctionMap INSTANCE = new HiveAlgebricksBuiltInFunctionMap();
+    /**
+     * hive auqa builtin function map instance
+     */
+    public static HiveAlgebricksBuiltInFunctionMap INSTANCE = new HiveAlgebricksBuiltInFunctionMap();
 
-	/**
-	 * hive to Algebricks function name mapping
-	 */
-	private HashMap<String, FunctionIdentifier> hiveToAlgebricksMap = new HashMap<String, FunctionIdentifier>();
+    /**
+     * hive to Algebricks function name mapping
+     */
+    private HashMap<String, FunctionIdentifier> hiveToAlgebricksMap = new HashMap<String, FunctionIdentifier>();
 
-	/**
-	 * Algebricks to hive function name mapping
-	 */
-	private HashMap<FunctionIdentifier, String> AlgebricksToHiveMap = new HashMap<FunctionIdentifier, String>();
+    /**
+     * Algebricks to hive function name mapping
+     */
+    private HashMap<FunctionIdentifier, String> AlgebricksToHiveMap = new HashMap<FunctionIdentifier, String>();
 
-	/**
-	 * the bi-directional mapping between hive functions and Algebricks
-	 * functions
-	 */
-	private HiveAlgebricksBuiltInFunctionMap() {
-		hiveToAlgebricksMap.put("and", AlgebricksBuiltinFunctions.AND);
-		hiveToAlgebricksMap.put("or", AlgebricksBuiltinFunctions.OR);
-		hiveToAlgebricksMap.put("!", AlgebricksBuiltinFunctions.NOT);
-		hiveToAlgebricksMap.put("not", AlgebricksBuiltinFunctions.NOT);
-		hiveToAlgebricksMap.put("=", AlgebricksBuiltinFunctions.EQ);
-		hiveToAlgebricksMap.put("<>", AlgebricksBuiltinFunctions.NEQ);
-		hiveToAlgebricksMap.put(">", AlgebricksBuiltinFunctions.GT);
-		hiveToAlgebricksMap.put("<", AlgebricksBuiltinFunctions.LT);
-		hiveToAlgebricksMap.put(">=", AlgebricksBuiltinFunctions.GE);
-		hiveToAlgebricksMap.put("<=", AlgebricksBuiltinFunctions.LE);
+    /**
+     * the bi-directional mapping between hive functions and Algebricks
+     * functions
+     */
+    private HiveAlgebricksBuiltInFunctionMap() {
+        hiveToAlgebricksMap.put("and", AlgebricksBuiltinFunctions.AND);
+        hiveToAlgebricksMap.put("or", AlgebricksBuiltinFunctions.OR);
+        hiveToAlgebricksMap.put("!", AlgebricksBuiltinFunctions.NOT);
+        hiveToAlgebricksMap.put("not", AlgebricksBuiltinFunctions.NOT);
+        hiveToAlgebricksMap.put("=", AlgebricksBuiltinFunctions.EQ);
+        hiveToAlgebricksMap.put("<>", AlgebricksBuiltinFunctions.NEQ);
+        hiveToAlgebricksMap.put(">", AlgebricksBuiltinFunctions.GT);
+        hiveToAlgebricksMap.put("<", AlgebricksBuiltinFunctions.LT);
+        hiveToAlgebricksMap.put(">=", AlgebricksBuiltinFunctions.GE);
+        hiveToAlgebricksMap.put("<=", AlgebricksBuiltinFunctions.LE);
 
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.AND, "and");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.OR, "or");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "!");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "not");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.EQ, "=");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NEQ, "<>");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GT, ">");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LT, "<");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GE, ">=");
-		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LE, "<=");
-	}
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.AND, "and");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.OR, "or");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "!");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "not");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.EQ, "=");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NEQ, "<>");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GT, ">");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LT, "<");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GE, ">=");
+        AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LE, "<=");
+    }
 
-	/**
-	 * get hive function name from Algebricks function identifier
-	 * 
-	 * @param AlgebricksId
-	 * @return hive
-	 */
-	public String getHiveFunctionName(FunctionIdentifier AlgebricksId) {
-		return AlgebricksToHiveMap.get(AlgebricksId);
-	}
+    /**
+     * get hive function name from Algebricks function identifier
+     * 
+     * @param AlgebricksId
+     * @return hive
+     */
+    public String getHiveFunctionName(FunctionIdentifier AlgebricksId) {
+        return AlgebricksToHiveMap.get(AlgebricksId);
+    }
 
-	/**
-	 * get hive UDF or Generic class's corresponding built-in functions
-	 * 
-	 * @param funcClass
-	 * @return function identifier
-	 */
-	public FunctionIdentifier getAlgebricksFunctionId(Class<?> funcClass) {
-		Description annotation = (Description) funcClass
-				.getAnnotation(Description.class);
-		String hiveUDFName = "";
-		if (annotation == null) {
-			hiveUDFName = null;
-			return null;
-		} else {
-			hiveUDFName = annotation.name();
-			return hiveToAlgebricksMap.get(hiveUDFName);
-		}
-	}
+    /**
+     * get hive UDF or Generic class's corresponding built-in functions
+     * 
+     * @param funcClass
+     * @return function identifier
+     */
+    public FunctionIdentifier getAlgebricksFunctionId(Class<?> funcClass) {
+        Description annotation = (Description) funcClass.getAnnotation(Description.class);
+        String hiveUDFName = "";
+        if (annotation == null) {
+            hiveUDFName = null;
+            return null;
+        } else {
+            hiveUDFName = annotation.name();
+            return hiveToAlgebricksMap.get(hiveUDFName);
+        }
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
index afb7d39..e10e8c1 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
@@ -36,165 +36,144 @@
 
 public class HiveExpressionTypeComputer implements IExpressionTypeComputer {
 
-	public static IExpressionTypeComputer INSTANCE = new HiveExpressionTypeComputer();
+    public static IExpressionTypeComputer INSTANCE = new HiveExpressionTypeComputer();
 
-	@Override
-	public Object getType(ILogicalExpression expr,
-			IMetadataProvider<?, ?> metadataProvider,
-			IVariableTypeEnvironment env) throws AlgebricksException {
-		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-			/**
-			 * function expression
-			 */
-			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-			IFunctionInfo funcInfo = funcExpr.getFunctionInfo();
+    @Override
+    public Object getType(ILogicalExpression expr, IMetadataProvider<?, ?> metadataProvider,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            /**
+             * function expression
+             */
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            IFunctionInfo funcInfo = funcExpr.getFunctionInfo();
 
-			/**
-			 * argument expressions, types, object inspectors
-			 */
-			List<Mutable<ILogicalExpression>> arguments = funcExpr
-					.getArguments();
-			List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
+            /**
+             * argument expressions, types, object inspectors
+             */
+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();
+            List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
 
-			/**
-			 * get types of argument
-			 */
-			for (Mutable<ILogicalExpression> argument : arguments) {
-				TypeInfo type = (TypeInfo) getType(argument.getValue(),
-						metadataProvider, env);
-				argumentTypes.add(type);
-			}
+            /**
+             * get types of argument
+             */
+            for (Mutable<ILogicalExpression> argument : arguments) {
+                TypeInfo type = (TypeInfo) getType(argument.getValue(), metadataProvider, env);
+                argumentTypes.add(type);
+            }
 
-			ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes
-					.size()];
+            ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes.size()];
 
-			/**
-			 * get object inspector
-			 */
-			for (int i = 0; i < argumentTypes.size(); i++) {
-				childrenOIs[i] = TypeInfoUtils
-						.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes
-								.get(i));
-			}
+            /**
+             * get object inspector
+             */
+            for (int i = 0; i < argumentTypes.size(); i++) {
+                childrenOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes.get(i));
+            }
 
-			/**
-			 * type inference for scalar function
-			 */
-			if (funcExpr instanceof ScalarFunctionCallExpression) {
+            /**
+             * type inference for scalar function
+             */
+            if (funcExpr instanceof ScalarFunctionCallExpression) {
 
-				FunctionIdentifier AlgebricksId = funcInfo
-						.getFunctionIdentifier();
-				Object functionInfo = ((HiveFunctionInfo) funcInfo).getInfo();
-				String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE
-						.getHiveFunctionName(AlgebricksId);
-				GenericUDF udf;
-				if (udfName != null) {
-					/**
-					 * get corresponding function info for built-in functions
-					 */
-					FunctionInfo fInfo = FunctionRegistry
-							.getFunctionInfo(udfName);
-					udf = fInfo.getGenericUDF();
-				} else if (functionInfo != null) {
-					/**
-					 * for GenericUDFBridge: we should not call get type of this
-					 * hive expression, because parameters may have been
-					 * changed!
-					 */
-					ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) functionInfo;
-					udf = hiveExpr.getGenericUDF();
-				} else {
-					/**
-					 * for other generic UDF
-					 */
-					Class<?> udfClass;
-					try {
-						udfClass = Class.forName(AlgebricksId.getName());
-						udf = (GenericUDF) udfClass.newInstance();
-					} catch (Exception e) {
-						e.printStackTrace();
-						throw new AlgebricksException(e.getMessage());
-					}
-				}
-				/**
-				 * doing the actual type inference
-				 */
-				ObjectInspector oi = null;
-				try {
-					oi = udf.initialize(childrenOIs);
-				} catch (Exception e) {
-					e.printStackTrace();
-				}
+                FunctionIdentifier AlgebricksId = funcInfo.getFunctionIdentifier();
+                Object functionInfo = ((HiveFunctionInfo) funcInfo).getInfo();
+                String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getHiveFunctionName(AlgebricksId);
+                GenericUDF udf;
+                if (udfName != null) {
+                    /**
+                     * get corresponding function info for built-in functions
+                     */
+                    FunctionInfo fInfo = FunctionRegistry.getFunctionInfo(udfName);
+                    udf = fInfo.getGenericUDF();
+                } else if (functionInfo != null) {
+                    /**
+                     * for GenericUDFBridge: we should not call get type of this
+                     * hive expression, because parameters may have been
+                     * changed!
+                     */
+                    ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) functionInfo;
+                    udf = hiveExpr.getGenericUDF();
+                } else {
+                    /**
+                     * for other generic UDF
+                     */
+                    Class<?> udfClass;
+                    try {
+                        udfClass = Class.forName(AlgebricksId.getName());
+                        udf = (GenericUDF) udfClass.newInstance();
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                        throw new AlgebricksException(e.getMessage());
+                    }
+                }
+                /**
+                 * doing the actual type inference
+                 */
+                ObjectInspector oi = null;
+                try {
+                    oi = udf.initialize(childrenOIs);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
 
-				TypeInfo exprType = TypeInfoUtils
-						.getTypeInfoFromObjectInspector(oi);
-				return exprType;
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
+                return exprType;
 
-			} else if (funcExpr instanceof AggregateFunctionCallExpression) {
-				/**
-				 * hive aggregation info
-				 */
-				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr
-						.getFunctionInfo()).getInfo();
-				/**
-				 * type inference for aggregation function
-				 */
-				GenericUDAFEvaluator result = aggregateDesc
-						.getGenericUDAFEvaluator();
+            } else if (funcExpr instanceof AggregateFunctionCallExpression) {
+                /**
+                 * hive aggregation info
+                 */
+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())
+                        .getInfo();
+                /**
+                 * type inference for aggregation function
+                 */
+                GenericUDAFEvaluator result = aggregateDesc.getGenericUDAFEvaluator();
 
-				ObjectInspector returnOI = null;
-				try {
-					returnOI = result
-							.init(aggregateDesc.getMode(), childrenOIs);
-				} catch (HiveException e) {
-					e.printStackTrace();
-				}
-				TypeInfo exprType = TypeInfoUtils
-						.getTypeInfoFromObjectInspector(returnOI);
-				return exprType;
-			} else if (funcExpr instanceof UnnestingFunctionCallExpression) {
-				/**
-				 * type inference for UDTF function
-				 */
-				UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr
-						.getFunctionInfo()).getInfo();
-				GenericUDTF udtf = hiveDesc.getGenericUDTF();
-				ObjectInspector returnOI = null;
-				try {
-					returnOI = udtf.initialize(childrenOIs);
-				} catch (HiveException e) {
-					e.printStackTrace();
-				}
-				TypeInfo exprType = TypeInfoUtils
-						.getTypeInfoFromObjectInspector(returnOI);
-				return exprType;
-			} else {
-				throw new IllegalStateException(
-						"unrecognized function expression "
-								+ expr.getClass().getName());
-			}
-		} else if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-			/**
-			 * get type for variable in the environment
-			 */
-			VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;
-			LogicalVariable var = varExpr.getVariableReference();
-			TypeInfo type = (TypeInfo) env.getVarType(var);
-			return type;
-		} else if (expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-			/**
-			 * get type for constant, from its java class
-			 */
-			ConstantExpression constExpr = (ConstantExpression) expr;
-			HivesterixConstantValue value = (HivesterixConstantValue) constExpr
-					.getValue();
-			TypeInfo type = TypeInfoFactory
-					.getPrimitiveTypeInfoFromJavaPrimitive(value.getObject()
-							.getClass());
-			return type;
-		} else {
-			throw new IllegalStateException("illegal expressions "
-					+ expr.getClass().getName());
-		}
-	}
+                ObjectInspector returnOI = null;
+                try {
+                    returnOI = result.init(aggregateDesc.getMode(), childrenOIs);
+                } catch (HiveException e) {
+                    e.printStackTrace();
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else if (funcExpr instanceof UnnestingFunctionCallExpression) {
+                /**
+                 * type inference for UDTF function
+                 */
+                UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();
+                GenericUDTF udtf = hiveDesc.getGenericUDTF();
+                ObjectInspector returnOI = null;
+                try {
+                    returnOI = udtf.initialize(childrenOIs);
+                } catch (HiveException e) {
+                    e.printStackTrace();
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else {
+                throw new IllegalStateException("unrecognized function expression " + expr.getClass().getName());
+            }
+        } else if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            /**
+             * get type for variable in the environment
+             */
+            VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;
+            LogicalVariable var = varExpr.getVariableReference();
+            TypeInfo type = (TypeInfo) env.getVarType(var);
+            return type;
+        } else if (expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            /**
+             * get type for constant, from its java class
+             */
+            ConstantExpression constExpr = (ConstantExpression) expr;
+            HivesterixConstantValue value = (HivesterixConstantValue) constExpr.getValue();
+            TypeInfo type = TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(value.getObject().getClass());
+            return type;
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
index 220bd00..ced8d02 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
@@ -7,30 +7,30 @@
 
 public class HiveFunctionInfo implements IFunctionInfo, Serializable {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	/**
-	 * primary function identifier
-	 */
-	private transient FunctionIdentifier fid;
+    /**
+     * primary function identifier
+     */
+    private transient FunctionIdentifier fid;
 
-	/**
-	 * secondary function identifier: function name
-	 */
-	private transient Object secondaryFid;
+    /**
+     * secondary function identifier: function name
+     */
+    private transient Object secondaryFid;
 
-	public HiveFunctionInfo(FunctionIdentifier fid, Object secondFid) {
-		this.fid = fid;
-		this.secondaryFid = secondFid;
-	}
+    public HiveFunctionInfo(FunctionIdentifier fid, Object secondFid) {
+        this.fid = fid;
+        this.secondaryFid = secondFid;
+    }
 
-	@Override
-	public FunctionIdentifier getFunctionIdentifier() {
-		return fid;
-	}
+    @Override
+    public FunctionIdentifier getFunctionIdentifier() {
+        return fid;
+    }
 
-	public Object getInfo() {
-		return secondaryFid;
-	}
+    public Object getInfo() {
+        return secondaryFid;
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
index 8dea691..b77fe49 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
@@ -24,61 +24,51 @@
  * generate merge aggregation expression from an aggregation expression
  * 
  * @author yingyib
- * 
  */
-public class HiveMergeAggregationExpressionFactory implements
-		IMergeAggregationExpressionFactory {
+public class HiveMergeAggregationExpressionFactory implements IMergeAggregationExpressionFactory {
 
-	public static IMergeAggregationExpressionFactory INSTANCE = new HiveMergeAggregationExpressionFactory();
+    public static IMergeAggregationExpressionFactory INSTANCE = new HiveMergeAggregationExpressionFactory();
 
-	@Override
-	public ILogicalExpression createMergeAggregation(ILogicalExpression expr,
-			IOptimizationContext context) throws AlgebricksException {
-		/**
-		 * type inference for scalar function
-		 */
-		if (expr instanceof AggregateFunctionCallExpression) {
-			AggregateFunctionCallExpression funcExpr = (AggregateFunctionCallExpression) expr;
-			/**
-			 * hive aggregation info
-			 */
-			AggregationDesc aggregator = (AggregationDesc) ((HiveFunctionInfo) funcExpr
-					.getFunctionInfo()).getInfo();
-			LogicalVariable inputVar = context.newVar();
-			ExprNodeDesc col = new ExprNodeColumnDesc(
-					TypeInfoFactory.voidTypeInfo, inputVar.toString(), null,
-					false);
-			ArrayList<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
-			parameters.add(col);
+    @Override
+    public ILogicalExpression createMergeAggregation(ILogicalExpression expr, IOptimizationContext context)
+            throws AlgebricksException {
+        /**
+         * type inference for scalar function
+         */
+        if (expr instanceof AggregateFunctionCallExpression) {
+            AggregateFunctionCallExpression funcExpr = (AggregateFunctionCallExpression) expr;
+            /**
+             * hive aggregation info
+             */
+            AggregationDesc aggregator = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();
+            LogicalVariable inputVar = context.newVar();
+            ExprNodeDesc col = new ExprNodeColumnDesc(TypeInfoFactory.voidTypeInfo, inputVar.toString(), null, false);
+            ArrayList<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
+            parameters.add(col);
 
-			GenericUDAFEvaluator.Mode mergeMode;
-			if (aggregator.getMode() == GenericUDAFEvaluator.Mode.PARTIAL1)
-				mergeMode = GenericUDAFEvaluator.Mode.PARTIAL2;
-			else if (aggregator.getMode() == GenericUDAFEvaluator.Mode.COMPLETE)
-				mergeMode = GenericUDAFEvaluator.Mode.FINAL;
-			else
-				mergeMode = aggregator.getMode();
-			AggregationDesc mergeDesc = new AggregationDesc(
-					aggregator.getGenericUDAFName(),
-					aggregator.getGenericUDAFEvaluator(), parameters,
-					aggregator.getDistinct(), mergeMode);
+            GenericUDAFEvaluator.Mode mergeMode;
+            if (aggregator.getMode() == GenericUDAFEvaluator.Mode.PARTIAL1)
+                mergeMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+            else if (aggregator.getMode() == GenericUDAFEvaluator.Mode.COMPLETE)
+                mergeMode = GenericUDAFEvaluator.Mode.FINAL;
+            else
+                mergeMode = aggregator.getMode();
+            AggregationDesc mergeDesc = new AggregationDesc(aggregator.getGenericUDAFName(),
+                    aggregator.getGenericUDAFEvaluator(), parameters, aggregator.getDistinct(), mergeMode);
 
-			String UDAFName = mergeDesc.getGenericUDAFName();
-			List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
-			arguments.add(new MutableObject<ILogicalExpression>(
-					new VariableReferenceExpression(inputVar)));
+            String UDAFName = mergeDesc.getGenericUDAFName();
+            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
+            arguments.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(inputVar)));
 
-			FunctionIdentifier funcId = new FunctionIdentifier(
-					ExpressionConstant.NAMESPACE, UDAFName + "("
-							+ mergeDesc.getMode() + ")");
-			HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, mergeDesc);
-			AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(
-					funcInfo, false, arguments);
-			return aggregationExpression;
-		} else {
-			throw new IllegalStateException("illegal expressions "
-					+ expr.getClass().getName());
-		}
-	}
+            FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDAFName + "("
+                    + mergeDesc.getMode() + ")");
+            HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, mergeDesc);
+            AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(funcInfo,
+                    false, arguments);
+            return aggregationExpression;
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
index 10c9b8a..906e3ce 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
@@ -5,11 +5,11 @@
 
 public class HiveNullableTypeComputer implements INullableTypeComputer {
 
-	public static INullableTypeComputer INSTANCE = new HiveNullableTypeComputer();
+    public static INullableTypeComputer INSTANCE = new HiveNullableTypeComputer();
 
-	@Override
-	public Object makeNullableType(Object type) throws AlgebricksException {
-		return type;
-	}
+    @Override
+    public Object makeNullableType(Object type) throws AlgebricksException {
+        return type;
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
index 7062e26..c74966c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
@@ -22,95 +22,81 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
 
-public class HivePartialAggregationTypeComputer implements
-		IPartialAggregationTypeComputer {
+public class HivePartialAggregationTypeComputer implements IPartialAggregationTypeComputer {
 
-	public static IPartialAggregationTypeComputer INSTANCE = new HivePartialAggregationTypeComputer();
+    public static IPartialAggregationTypeComputer INSTANCE = new HivePartialAggregationTypeComputer();
 
-	@Override
-	public Object getType(ILogicalExpression expr,
-			IVariableTypeEnvironment env,
-			IMetadataProvider<?, ?> metadataProvider)
-			throws AlgebricksException {
-		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-			IExpressionTypeComputer tc = HiveExpressionTypeComputer.INSTANCE;
-			/**
-			 * function expression
-			 */
-			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+    @Override
+    public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            IExpressionTypeComputer tc = HiveExpressionTypeComputer.INSTANCE;
+            /**
+             * function expression
+             */
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
 
-			/**
-			 * argument expressions, types, object inspectors
-			 */
-			List<Mutable<ILogicalExpression>> arguments = funcExpr
-					.getArguments();
-			List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
+            /**
+             * argument expressions, types, object inspectors
+             */
+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();
+            List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
 
-			/**
-			 * get types of argument
-			 */
-			for (Mutable<ILogicalExpression> argument : arguments) {
-				TypeInfo type = (TypeInfo) tc.getType(argument.getValue(),
-						metadataProvider, env);
-				argumentTypes.add(type);
-			}
+            /**
+             * get types of argument
+             */
+            for (Mutable<ILogicalExpression> argument : arguments) {
+                TypeInfo type = (TypeInfo) tc.getType(argument.getValue(), metadataProvider, env);
+                argumentTypes.add(type);
+            }
 
-			ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes
-					.size()];
+            ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes.size()];
 
-			/**
-			 * get object inspector
-			 */
-			for (int i = 0; i < argumentTypes.size(); i++) {
-				childrenOIs[i] = TypeInfoUtils
-						.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes
-								.get(i));
-			}
+            /**
+             * get object inspector
+             */
+            for (int i = 0; i < argumentTypes.size(); i++) {
+                childrenOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes.get(i));
+            }
 
-			/**
-			 * type inference for scalar function
-			 */
-			if (funcExpr instanceof AggregateFunctionCallExpression) {
-				/**
-				 * hive aggregation info
-				 */
-				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr
-						.getFunctionInfo()).getInfo();
-				/**
-				 * type inference for aggregation function
-				 */
-				GenericUDAFEvaluator result = aggregateDesc
-						.getGenericUDAFEvaluator();
+            /**
+             * type inference for scalar function
+             */
+            if (funcExpr instanceof AggregateFunctionCallExpression) {
+                /**
+                 * hive aggregation info
+                 */
+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())
+                        .getInfo();
+                /**
+                 * type inference for aggregation function
+                 */
+                GenericUDAFEvaluator result = aggregateDesc.getGenericUDAFEvaluator();
 
-				ObjectInspector returnOI = null;
-				try {
-					returnOI = result.init(
-							getPartialMode(aggregateDesc.getMode()),
-							childrenOIs);
-				} catch (HiveException e) {
-					e.printStackTrace();
-				}
-				TypeInfo exprType = TypeInfoUtils
-						.getTypeInfoFromObjectInspector(returnOI);
-				return exprType;
-			} else {
-				throw new IllegalStateException("illegal expressions "
-						+ expr.getClass().getName());
-			}
-		} else {
-			throw new IllegalStateException("illegal expressions "
-					+ expr.getClass().getName());
-		}
-	}
+                ObjectInspector returnOI = null;
+                try {
+                    returnOI = result.init(getPartialMode(aggregateDesc.getMode()), childrenOIs);
+                } catch (HiveException e) {
+                    e.printStackTrace();
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else {
+                throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+            }
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
 
-	private Mode getPartialMode(Mode mode) {
-		Mode partialMode;
-		if (mode == Mode.FINAL)
-			partialMode = Mode.PARTIAL2;
-		else if (mode == Mode.COMPLETE)
-			partialMode = Mode.PARTIAL1;
-		else
-			partialMode = mode;
-		return partialMode;
-	}
+    private Mode getPartialMode(Mode mode) {
+        Mode partialMode;
+        if (mode == Mode.FINAL)
+            partialMode = Mode.PARTIAL2;
+        else if (mode == Mode.COMPLETE)
+            partialMode = Mode.PARTIAL1;
+        else
+            partialMode = mode;
+        return partialMode;
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
index de9cea6..3d35e1f 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
@@ -4,52 +4,52 @@
 
 public class HivesterixConstantValue implements IAlgebricksConstantValue {
 
-	private Object object;
+    private Object object;
 
-	public HivesterixConstantValue(Object object) {
-		this.setObject(object);
-	}
+    public HivesterixConstantValue(Object object) {
+        this.setObject(object);
+    }
 
-	@Override
-	public boolean isFalse() {
-		return object == Boolean.FALSE;
-	}
+    @Override
+    public boolean isFalse() {
+        return object == Boolean.FALSE;
+    }
 
-	@Override
-	public boolean isNull() {
-		return object == null;
-	}
+    @Override
+    public boolean isNull() {
+        return object == null;
+    }
 
-	@Override
-	public boolean isTrue() {
-		return object == Boolean.TRUE;
-	}
+    @Override
+    public boolean isTrue() {
+        return object == Boolean.TRUE;
+    }
 
-	public void setObject(Object object) {
-		this.object = object;
-	}
+    public void setObject(Object object) {
+        this.object = object;
+    }
 
-	public Object getObject() {
-		return object;
-	}
+    public Object getObject() {
+        return object;
+    }
 
-	@Override
-	public String toString() {
-		return object.toString();
-	}
+    @Override
+    public String toString() {
+        return object.toString();
+    }
 
-	@Override
-	public boolean equals(Object o) {
-		if (!(o instanceof HivesterixConstantValue)) {
-			return false;
-		}
-		HivesterixConstantValue v2 = (HivesterixConstantValue) o;
-		return object.equals(v2.getObject());
-	}
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof HivesterixConstantValue)) {
+            return false;
+        }
+        HivesterixConstantValue v2 = (HivesterixConstantValue) o;
+        return object.equals(v2.getObject());
+    }
 
-	@Override
-	public int hashCode() {
-		return object.hashCode();
-	}
+    @Override
+    public int hashCode() {
+        return object.hashCode();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java
index 2b1d191..3982d33 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java
@@ -10,30 +10,30 @@
 
 public class Schema implements Serializable {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private List<String> fieldNames;
+    private List<String> fieldNames;
 
-	private List<TypeInfo> fieldTypes;
+    private List<TypeInfo> fieldTypes;
 
-	public Schema(List<String> fieldNames, List<TypeInfo> fieldTypes) {
-		this.fieldNames = fieldNames;
-		this.fieldTypes = fieldTypes;
-	}
+    public Schema(List<String> fieldNames, List<TypeInfo> fieldTypes) {
+        this.fieldNames = fieldNames;
+        this.fieldTypes = fieldTypes;
+    }
 
-	public ObjectInspector toObjectInspector() {
-		return LazyUtils.getLazyObjectInspector(fieldNames, fieldTypes);
-	}
+    public ObjectInspector toObjectInspector() {
+        return LazyUtils.getLazyObjectInspector(fieldNames, fieldTypes);
+    }
 
-	public List<String> getNames() {
-		return fieldNames;
-	}
+    public List<String> getNames() {
+        return fieldNames;
+    }
 
-	public List<TypeInfo> getTypes() {
-		return fieldTypes;
-	}
+    public List<TypeInfo> getTypes() {
+        return fieldTypes;
+    }
 
-	public Object[] getSchema() {
-		return fieldTypes.toArray();
-	}
+    public Object[] getSchema() {
+        return fieldTypes.toArray();
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
index 2765e44..409ace3 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
@@ -80,770 +80,728 @@
 @SuppressWarnings("rawtypes")
 public class HiveAlgebricksTranslator implements Translator {
 
-	private int currentVariable = 0;
+    private int currentVariable = 0;
 
-	private List<Mutable<ILogicalOperator>> logicalOp = new ArrayList<Mutable<ILogicalOperator>>();
+    private List<Mutable<ILogicalOperator>> logicalOp = new ArrayList<Mutable<ILogicalOperator>>();
 
-	private boolean continueTraverse = true;
+    private boolean continueTraverse = true;
 
-	private IMetadataProvider<PartitionDesc, Object> metaData;
+    private IMetadataProvider<PartitionDesc, Object> metaData;
 
-	/**
-	 * map variable name to the logical variable
-	 */
-	private HashMap<String, LogicalVariable> nameToLogicalVariableMap = new HashMap<String, LogicalVariable>();
+    /**
+     * map variable name to the logical variable
+     */
+    private HashMap<String, LogicalVariable> nameToLogicalVariableMap = new HashMap<String, LogicalVariable>();
 
-	/**
-	 * map field name to LogicalVariable
-	 */
-	private HashMap<String, LogicalVariable> fieldToLogicalVariableMap = new HashMap<String, LogicalVariable>();
+    /**
+     * map field name to LogicalVariable
+     */
+    private HashMap<String, LogicalVariable> fieldToLogicalVariableMap = new HashMap<String, LogicalVariable>();
 
-	/**
-	 * map logical variable to name
-	 */
-	private HashMap<LogicalVariable, String> logicalVariableToFieldMap = new HashMap<LogicalVariable, String>();
+    /**
+     * map logical variable to name
+     */
+    private HashMap<LogicalVariable, String> logicalVariableToFieldMap = new HashMap<LogicalVariable, String>();
 
-	/**
-	 * asterix root operators
-	 */
-	private List<Mutable<ILogicalOperator>> rootOperators = new ArrayList<Mutable<ILogicalOperator>>();
+    /**
+     * asterix root operators
+     */
+    private List<Mutable<ILogicalOperator>> rootOperators = new ArrayList<Mutable<ILogicalOperator>>();
 
-	/**
-	 * a list of visitors
-	 */
-	private List<Visitor> visitors = new ArrayList<Visitor>();
+    /**
+     * a list of visitors
+     */
+    private List<Visitor> visitors = new ArrayList<Visitor>();
 
-	/**
-	 * output writer to print things out
-	 */
-	private static PrintWriter outputWriter = new PrintWriter(
-			new OutputStreamWriter(System.out));
+    /**
+     * output writer to print things out
+     */
+    private static PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(System.out));
 
-	/**
-	 * map a logical variable to type info
-	 */
-	private HashMap<LogicalVariable, TypeInfo> variableToType = new HashMap<LogicalVariable, TypeInfo>();
+    /**
+     * map a logical variable to type info
+     */
+    private HashMap<LogicalVariable, TypeInfo> variableToType = new HashMap<LogicalVariable, TypeInfo>();
 
-	@Override
-	public LogicalVariable getVariable(String fieldName, TypeInfo type) {
-		LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
-		if (var == null) {
-			currentVariable++;
-			var = new LogicalVariable(currentVariable);
-			fieldToLogicalVariableMap.put(fieldName, var);
-			nameToLogicalVariableMap.put(var.toString(), var);
-			variableToType.put(var, type);
-			logicalVariableToFieldMap.put(var, fieldName);
-		}
-		return var;
-	}
+    @Override
+    public LogicalVariable getVariable(String fieldName, TypeInfo type) {
+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
+        if (var == null) {
+            currentVariable++;
+            var = new LogicalVariable(currentVariable);
+            fieldToLogicalVariableMap.put(fieldName, var);
+            nameToLogicalVariableMap.put(var.toString(), var);
+            variableToType.put(var, type);
+            logicalVariableToFieldMap.put(var, fieldName);
+        }
+        return var;
+    }
 
-	@Override
-	public LogicalVariable getNewVariable(String fieldName, TypeInfo type) {
-		currentVariable++;
-		LogicalVariable var = new LogicalVariable(currentVariable);
-		fieldToLogicalVariableMap.put(fieldName, var);
-		nameToLogicalVariableMap.put(var.toString(), var);
-		variableToType.put(var, type);
-		logicalVariableToFieldMap.put(var, fieldName);
-		return var;
-	}
+    @Override
+    public LogicalVariable getNewVariable(String fieldName, TypeInfo type) {
+        currentVariable++;
+        LogicalVariable var = new LogicalVariable(currentVariable);
+        fieldToLogicalVariableMap.put(fieldName, var);
+        nameToLogicalVariableMap.put(var.toString(), var);
+        variableToType.put(var, type);
+        logicalVariableToFieldMap.put(var, fieldName);
+        return var;
+    }
 
-	@Override
-	public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar) {
-		String name = this.logicalVariableToFieldMap.get(oldVar);
-		if (name != null) {
-			fieldToLogicalVariableMap.put(name, newVar);
-			nameToLogicalVariableMap.put(newVar.toString(), newVar);
-			nameToLogicalVariableMap.put(oldVar.toString(), newVar);
-			logicalVariableToFieldMap.put(newVar, name);
-		}
-	}
+    @Override
+    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar) {
+        String name = this.logicalVariableToFieldMap.get(oldVar);
+        if (name != null) {
+            fieldToLogicalVariableMap.put(name, newVar);
+            nameToLogicalVariableMap.put(newVar.toString(), newVar);
+            nameToLogicalVariableMap.put(oldVar.toString(), newVar);
+            logicalVariableToFieldMap.put(newVar, name);
+        }
+    }
 
-	@Override
-	public IMetadataProvider<PartitionDesc, Object> getMetadataProvider() {
-		return metaData;
-	}
+    @Override
+    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider() {
+        return metaData;
+    }
 
-	/**
-	 * only get an variable, without rewriting it
-	 * 
-	 * @param fieldName
-	 * @return
-	 */
-	private LogicalVariable getVariableOnly(String fieldName) {
-		return fieldToLogicalVariableMap.get(fieldName);
-	}
+    /**
+     * only get an variable, without rewriting it
+     * 
+     * @param fieldName
+     * @return
+     */
+    private LogicalVariable getVariableOnly(String fieldName) {
+        return fieldToLogicalVariableMap.get(fieldName);
+    }
 
-	private void updateVariable(String fieldName, LogicalVariable variable) {
-		LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
-		if (var == null) {
-			fieldToLogicalVariableMap.put(fieldName, variable);
-			nameToLogicalVariableMap.put(fieldName, variable);
-		} else if (!var.equals(variable)) {
-			fieldToLogicalVariableMap.put(fieldName, variable);
-			nameToLogicalVariableMap.put(fieldName, variable);
-		}
-	}
+    private void updateVariable(String fieldName, LogicalVariable variable) {
+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
+        if (var == null) {
+            fieldToLogicalVariableMap.put(fieldName, variable);
+            nameToLogicalVariableMap.put(fieldName, variable);
+        } else if (!var.equals(variable)) {
+            fieldToLogicalVariableMap.put(fieldName, variable);
+            nameToLogicalVariableMap.put(fieldName, variable);
+        }
+    }
 
-	/**
-	 * get a list of logical variables from the schema
-	 * 
-	 * @param schema
-	 * @return
-	 */
-	@Override
-	public List<LogicalVariable> getVariablesFromSchema(Schema schema) {
-		List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
-		List<String> names = schema.getNames();
+    /**
+     * get a list of logical variables from the schema
+     * 
+     * @param schema
+     * @return
+     */
+    @Override
+    public List<LogicalVariable> getVariablesFromSchema(Schema schema) {
+        List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
+        List<String> names = schema.getNames();
 
-		for (String name : names)
-			variables.add(nameToLogicalVariableMap.get(name));
-		return variables;
-	}
+        for (String name : names)
+            variables.add(nameToLogicalVariableMap.get(name));
+        return variables;
+    }
 
-	/**
-	 * get variable to typeinfo map
-	 * 
-	 * @return
-	 */
-	public HashMap<LogicalVariable, TypeInfo> getVariableContext() {
-		return this.variableToType;
-	}
+    /**
+     * get variable to typeinfo map
+     * 
+     * @return
+     */
+    public HashMap<LogicalVariable, TypeInfo> getVariableContext() {
+        return this.variableToType;
+    }
 
-	/**
-	 * get the number of variables s
-	 * 
-	 * @return
-	 */
-	public int getVariableCounter() {
-		return currentVariable + 1;
-	}
+    /**
+     * get the number of variables s
+     * 
+     * @return
+     */
+    public int getVariableCounter() {
+        return currentVariable + 1;
+    }
 
-	/**
-	 * translate from hive operator tree to asterix operator tree
-	 * 
-	 * @param hive
-	 *            roots
-	 * @return Algebricks roots
-	 */
-	public void translate(List<Operator> hiveRoot,
-			ILogicalOperator parentOperator,
-			HashMap<String, PartitionDesc> aliasToPathMap)
-			throws AlgebricksException {
-		/**
-		 * register visitors
-		 */
-		visitors.add(new FilterVisitor());
-		visitors.add(new GroupByVisitor());
-		visitors.add(new JoinVisitor());
-		visitors.add(new LateralViewJoinVisitor());
-		visitors.add(new UnionVisitor());
-		visitors.add(new LimitVisitor());
-		visitors.add(new MapJoinVisitor());
-		visitors.add(new ProjectVisitor());
-		visitors.add(new SortVisitor());
-		visitors.add(new ExtractVisitor());
-		visitors.add(new TableScanWriteVisitor(aliasToPathMap));
+    /**
+     * translate from hive operator tree to asterix operator tree
+     * 
+     * @param hive
+     *            roots
+     * @return Algebricks roots
+     */
+    public void translate(List<Operator> hiveRoot, ILogicalOperator parentOperator,
+            HashMap<String, PartitionDesc> aliasToPathMap) throws AlgebricksException {
+        /**
+         * register visitors
+         */
+        visitors.add(new FilterVisitor());
+        visitors.add(new GroupByVisitor());
+        visitors.add(new JoinVisitor());
+        visitors.add(new LateralViewJoinVisitor());
+        visitors.add(new UnionVisitor());
+        visitors.add(new LimitVisitor());
+        visitors.add(new MapJoinVisitor());
+        visitors.add(new ProjectVisitor());
+        visitors.add(new SortVisitor());
+        visitors.add(new ExtractVisitor());
+        visitors.add(new TableScanWriteVisitor(aliasToPathMap));
 
-		List<Mutable<ILogicalOperator>> refList = translate(hiveRoot,
-				new MutableObject<ILogicalOperator>(parentOperator));
-		insertReplicateOperator(refList);
-		if (refList != null)
-			rootOperators.addAll(refList);
-	}
+        List<Mutable<ILogicalOperator>> refList = translate(hiveRoot, new MutableObject<ILogicalOperator>(
+                parentOperator));
+        insertReplicateOperator(refList);
+        if (refList != null)
+            rootOperators.addAll(refList);
+    }
 
-	/**
-	 * translate operator DAG
-	 * 
-	 * @param hiveRoot
-	 * @param AlgebricksParentOperator
-	 * @return
-	 */
-	private List<Mutable<ILogicalOperator>> translate(List<Operator> hiveRoot,
-			Mutable<ILogicalOperator> AlgebricksParentOperator)
-			throws AlgebricksException {
+    /**
+     * translate operator DAG
+     * 
+     * @param hiveRoot
+     * @param AlgebricksParentOperator
+     * @return
+     */
+    private List<Mutable<ILogicalOperator>> translate(List<Operator> hiveRoot,
+            Mutable<ILogicalOperator> AlgebricksParentOperator) throws AlgebricksException {
 
-		for (Operator hiveOperator : hiveRoot) {
-			continueTraverse = true;
-			Mutable<ILogicalOperator> currentOperatorRef = null;
-			if (hiveOperator.getType() == OperatorType.FILTER) {
-				FilterOperator fop = (FilterOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.REDUCESINK) {
-				ReduceSinkOperator fop = (ReduceSinkOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.JOIN) {
-				JoinOperator fop = (JoinOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null) {
-						continueTraverse = true;
-						break;
-					} else
-						continueTraverse = false;
-				}
-				if (currentOperatorRef == null)
-					return null;
-			} else if (hiveOperator.getType() == OperatorType.LATERALVIEWJOIN) {
-				LateralViewJoinOperator fop = (LateralViewJoinOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-				if (currentOperatorRef == null)
-					return null;
-			} else if (hiveOperator.getType() == OperatorType.MAPJOIN) {
-				MapJoinOperator fop = (MapJoinOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null) {
-						continueTraverse = true;
-						break;
-					} else
-						continueTraverse = false;
-				}
-				if (currentOperatorRef == null)
-					return null;
-			} else if (hiveOperator.getType() == OperatorType.SELECT) {
-				SelectOperator fop = (SelectOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.EXTRACT) {
-				ExtractOperator fop = (ExtractOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.GROUPBY) {
-				GroupByOperator fop = (GroupByOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.TABLESCAN) {
-				TableScanOperator fop = (TableScanOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.FILESINK) {
-				FileSinkOperator fop = (FileSinkOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(fop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.LIMIT) {
-				LimitOperator lop = (LimitOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(lop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.UDTF) {
-				UDTFOperator lop = (UDTFOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(lop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null)
-						break;
-				}
-			} else if (hiveOperator.getType() == OperatorType.UNION) {
-				UnionOperator lop = (UnionOperator) hiveOperator;
-				for (Visitor visitor : visitors) {
-					currentOperatorRef = visitor.visit(lop,
-							AlgebricksParentOperator, this);
-					if (currentOperatorRef != null) {
-						continueTraverse = true;
-						break;
-					} else
-						continueTraverse = false;
-				}
-			} else
-				;
-			if (hiveOperator.getChildOperators() != null
-					&& hiveOperator.getChildOperators().size() > 0
-					&& continueTraverse) {
-				@SuppressWarnings("unchecked")
-				List<Operator> children = hiveOperator.getChildOperators();
-				if (currentOperatorRef == null)
-					currentOperatorRef = AlgebricksParentOperator;
-				translate(children, currentOperatorRef);
-			}
-			if (hiveOperator.getChildOperators() == null
-					|| hiveOperator.getChildOperators().size() == 0)
-				logicalOp.add(currentOperatorRef);
-		}
-		return logicalOp;
-	}
+        for (Operator hiveOperator : hiveRoot) {
+            continueTraverse = true;
+            Mutable<ILogicalOperator> currentOperatorRef = null;
+            if (hiveOperator.getType() == OperatorType.FILTER) {
+                FilterOperator fop = (FilterOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.REDUCESINK) {
+                ReduceSinkOperator fop = (ReduceSinkOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.JOIN) {
+                JoinOperator fop = (JoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null) {
+                        continueTraverse = true;
+                        break;
+                    } else
+                        continueTraverse = false;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.LATERALVIEWJOIN) {
+                LateralViewJoinOperator fop = (LateralViewJoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.MAPJOIN) {
+                MapJoinOperator fop = (MapJoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null) {
+                        continueTraverse = true;
+                        break;
+                    } else
+                        continueTraverse = false;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.SELECT) {
+                SelectOperator fop = (SelectOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.EXTRACT) {
+                ExtractOperator fop = (ExtractOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.GROUPBY) {
+                GroupByOperator fop = (GroupByOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.TABLESCAN) {
+                TableScanOperator fop = (TableScanOperator) hiveOperator

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.FILESINK) {

+                FileSinkOperator fop = (FileSinkOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.LIMIT) {

+                LimitOperator lop = (LimitOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.UDTF) {

+                UDTFOperator lop = (UDTFOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.UNION) {

+                UnionOperator lop = (UnionOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null) {

+                        continueTraverse = true;

+                        break;

+                    } else

+                        continueTraverse = false;

+                }

+            }

+            if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0

+                    && continueTraverse) {

+                @SuppressWarnings("unchecked")

+                List<Operator> children = hiveOperator.getChildOperators();

+                if (currentOperatorRef == null)

+                    currentOperatorRef = AlgebricksParentOperator;

+                translate(children, currentOperatorRef);

+            }

+            if (hiveOperator.getChildOperators() == null || hiveOperator.getChildOperators().size() == 0)

+                logicalOp.add(currentOperatorRef);

+        }

+        return logicalOp;

+    }
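
The translate loop above is a first-match dispatch: each Hive operator is offered to every registered visitor in order, and the first non-null Algebricks operator reference wins. A minimal, self-contained sketch of that pattern, where Visitor, Op, and Result are hypothetical stand-ins rather than the actual Hive or Algebricks types:

    import java.util.Arrays;
    import java.util.List;

    interface Visitor { Result visit(Op op); }

    class Op { final String type; Op(String t) { type = t; } }

    class Result { final String desc; Result(String d) { desc = d; } }

    public class DispatchSketch {
        static Result dispatch(Op op, List<Visitor> visitors) {
            for (Visitor v : visitors) {
                Result r = v.visit(op); // first visitor that handles the operator wins
                if (r != null)
                    return r;
            }
            return null; // no visitor matched this operator type
        }

        public static void main(String[] args) {
            List<Visitor> visitors = Arrays.asList(
                    op -> "FILTER".equals(op.type) ? new Result("select") : null,
                    op -> "LIMIT".equals(op.type) ? new Result("limit") : null);
            System.out.println(dispatch(new Op("LIMIT"), visitors).desc); // prints: limit
        }
    }
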

 

-	/**

-	 * used in select, group by to get no-column-expression columns

-	 * 

-	 * @param cols

-	 * @return

-	 */

-	public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent,

-			List<ExprNodeDesc> cols, ArrayList<LogicalVariable> variables) {

+    /**

+     * used in select and group-by to create variables for expressions that are not plain column references

+     * 

+     * @param cols

+     * @return

+     */

+    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,

+            ArrayList<LogicalVariable> variables) {

 

-		ArrayList<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();

+        ArrayList<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();

 

-		/**

-		 * variables to be appended in the assign operator

-		 */

-		ArrayList<LogicalVariable> appendedVariables = new ArrayList<LogicalVariable>();

+        /**

+         * variables to be appended in the assign operator

+         */

+        ArrayList<LogicalVariable> appendedVariables = new ArrayList<LogicalVariable>();

 

-		// one variable can only be assigned once

-		for (ExprNodeDesc hiveExpr : cols) {

-			rewriteExpression(hiveExpr);

+        // one variable can only be assigned once

+        for (ExprNodeDesc hiveExpr : cols) {

+            rewriteExpression(hiveExpr);

 

-			if (hiveExpr instanceof ExprNodeColumnDesc) {

-				ExprNodeColumnDesc desc2 = (ExprNodeColumnDesc) hiveExpr;

-				String fieldName = desc2.getTabAlias() + "."

-						+ desc2.getColumn();

+            if (hiveExpr instanceof ExprNodeColumnDesc) {

+                ExprNodeColumnDesc desc2 = (ExprNodeColumnDesc) hiveExpr;

+                String fieldName = desc2.getTabAlias() + "." + desc2.getColumn();

 

-				// System.out.println("project expr: " + fieldName);

+                // System.out.println("project expr: " + fieldName);

 

-				if (fieldName.indexOf("$$") < 0) {

-					LogicalVariable var = getVariable(fieldName,

-							hiveExpr.getTypeInfo());

-					desc2.setColumn(var.toString());

-					desc2.setTabAlias("");

-					variables.add(var);

-				} else {

-					LogicalVariable var = nameToLogicalVariableMap.get(desc2

-							.getColumn());

-					String name = this.logicalVariableToFieldMap.get(var);

-					var = this.getVariableOnly(name);

-					variables.add(var);

-				}

-			} else {

-				Mutable<ILogicalExpression> asterixExpr = translateScalarFucntion(hiveExpr);

-				expressions.add(asterixExpr);

-				LogicalVariable var = getVariable(hiveExpr.getExprString()

-						+ asterixExpr.hashCode(), hiveExpr.getTypeInfo());

-				variables.add(var);

-				appendedVariables.add(var);

-			}

-		}

+                if (fieldName.indexOf("$$") < 0) {

+                    LogicalVariable var = getVariable(fieldName, hiveExpr.getTypeInfo());

+                    desc2.setColumn(var.toString());

+                    desc2.setTabAlias("");

+                    variables.add(var);

+                } else {

+                    LogicalVariable var = nameToLogicalVariableMap.get(desc2.getColumn());

+                    String name = this.logicalVariableToFieldMap.get(var);

+                    var = this.getVariableOnly(name);

+                    variables.add(var);

+                }

+            } else {

+                Mutable<ILogicalExpression> asterixExpr = translateScalarFucntion(hiveExpr);

+                expressions.add(asterixExpr);

+                LogicalVariable var = getVariable(hiveExpr.getExprString() + asterixExpr.hashCode(),

+                        hiveExpr.getTypeInfo());

+                variables.add(var);

+                appendedVariables.add(var);

+            }

+        }

 

-		/**

-		 * create an assign operator to deal with appending

-		 */

-		ILogicalOperator assignOp = null;

-		if (appendedVariables.size() > 0) {

-			assignOp = new AssignOperator(appendedVariables, expressions);

-			assignOp.getInputs().add(parent);

-		}

-		return assignOp;

-	}

+        /**

+         * create an assign operator to deal with appending

+         */

+        ILogicalOperator assignOp = null;

+        if (appendedVariables.size() > 0) {

+            assignOp = new AssignOperator(appendedVariables, expressions);

+            assignOp.getInputs().add(parent);

+        }

+        return assignOp;

+    }
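
getAssignOperator only materializes an AssignOperator when at least one projected expression is not a plain column reference; plain columns reuse the variables already bound upstream. A toy illustration of that split, where the string-based column test is purely hypothetical:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class AssignSplitSketch {
        public static void main(String[] args) {
            List<String> exprs = Arrays.asList("t.a", "t.b", "t.a + 1");
            List<String> outputVars = new ArrayList<>();
            Map<String, String> computed = new LinkedHashMap<>(); // var -> expression

            int nextVar = 0;
            for (String e : exprs) {
                if (e.matches("\\w+\\.\\w+")) {
                    outputVars.add("$$" + e);       // plain column: reuse its variable
                } else {
                    String var = "$$v" + nextVar++; // computed: fresh variable to assign
                    computed.put(var, e);
                    outputVars.add(var);
                }
            }
            // an assign step is only needed when something was actually computed
            System.out.println("output: " + outputVars + ", assign: " + computed);
        }
    }
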

 

-	private ILogicalPlan plan;

+    private ILogicalPlan plan;

 

-	public ILogicalPlan genLogicalPlan() {

-		plan = new ALogicalPlanImpl(rootOperators);

-		return plan;

-	}

+    public ILogicalPlan genLogicalPlan() {

+        plan = new ALogicalPlanImpl(rootOperators);

+        return plan;

+    }

 

-	public void printOperators() throws AlgebricksException {

-		LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();

-		StringBuilder buffer = new StringBuilder();

-		PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);

-		outputWriter.println(buffer);

-		outputWriter.println("rewritten variables: ");

-		outputWriter.flush();

-		printVariables();

+    public void printOperators() throws AlgebricksException {

+        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();

+        StringBuilder buffer = new StringBuilder();

+        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);

+        outputWriter.println(buffer);

+        outputWriter.println("rewritten variables: ");

+        outputWriter.flush();

+        printVariables();

 

-	}

+    }

 

-	public static void setOutputPrinter(PrintWriter writer) {

-		outputWriter = writer;

-	}

+    public static void setOutputPrinter(PrintWriter writer) {

+        outputWriter = writer;

+    }

 

-	private void printVariables() {

-		Set<Entry<String, LogicalVariable>> entries = fieldToLogicalVariableMap

-				.entrySet();

+    private void printVariables() {

+        Set<Entry<String, LogicalVariable>> entries = fieldToLogicalVariableMap.entrySet();

 

-		for (Entry<String, LogicalVariable> entry : entries) {

-			outputWriter.println(entry.getKey() + " -> " + entry.getValue());

-		}

-		outputWriter.flush();

-	}

+        for (Entry<String, LogicalVariable> entry : entries) {

+            outputWriter.println(entry.getKey() + " -> " + entry.getValue());

+        }

+        outputWriter.flush();

+    }

 

-	/**

-	 * generate the object inspector for the output of an operator

-	 * 

-	 * @param operator

-	 *            The Hive operator

-	 * @return an ObjectInspector object

-	 */

-	public Schema generateInputSchema(Operator operator) {

-		List<String> variableNames = new ArrayList<String>();

-		List<TypeInfo> typeList = new ArrayList<TypeInfo>();

-		List<ColumnInfo> columns = operator.getSchema().getSignature();

+    /**

+     * generate the schema for the output of an operator

+     * 

+     * @param operator

+     *            The Hive operator

+     * @return a Schema object describing the operator's output

+     */

+    public Schema generateInputSchema(Operator operator) {

+        List<String> variableNames = new ArrayList<String>();

+        List<TypeInfo> typeList = new ArrayList<TypeInfo>();

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

 

-		for (ColumnInfo col : columns) {

-			// typeList.add();

-			TypeInfo type = col.getType();

-			typeList.add(type);

+        for (ColumnInfo col : columns) {

+            // typeList.add();

+            TypeInfo type = col.getType();

+            typeList.add(type);

 

-			String fieldName = col.getInternalName();

-			variableNames.add(fieldName);

-		}

+            String fieldName = col.getInternalName();

+            variableNames.add(fieldName);

+        }

 

-		return new Schema(variableNames, typeList);

-	}

+        return new Schema(variableNames, typeList);

+    }

 

-	/**

-	 * rewrite the names of output columns for feature expression evaluators to

-	 * use

-	 * 

-	 * @param operator

-	 */

-	public void rewriteOperatorOutputSchema(Operator operator) {

-		List<ColumnInfo> columns = operator.getSchema().getSignature();

+    /**

+     * rewrite the names of output columns for downstream expression

+     * evaluators to use

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(Operator operator) {

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

 

-		for (ColumnInfo column : columns) {

-			String columnName = column.getTabAlias() + "."

-					+ column.getInternalName();

-			if (columnName.indexOf("$$") < 0) {

-				LogicalVariable var = getVariable(columnName, column.getType());

-				column.setInternalName(var.toString());

-			}

-		}

-	}

+        for (ColumnInfo column : columns) {

+            String columnName = column.getTabAlias() + "." + column.getInternalName();

+            if (columnName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariable(columnName, column.getType());

+                column.setInternalName(var.toString());

+            }

+        }

+    }

 

-	@Override

-	public void rewriteOperatorOutputSchema(List<LogicalVariable> variables,

-			Operator operator) {

+    @Override

+    public void rewriteOperatorOutputSchema(List<LogicalVariable> variables, Operator operator) {

 

-		//printOperatorSchema(operator);

-		List<ColumnInfo> columns = operator.getSchema().getSignature();

-		if (variables.size() != columns.size()) {

-			throw new IllegalStateException("output cardinality error "

-					+ operator.getName() + " variable size: "

-					+ variables.size() + " expected " + columns.size());

-		}

+        //printOperatorSchema(operator);

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+        if (variables.size() != columns.size()) {

+            throw new IllegalStateException("output cardinality error " + operator.getName() + " variable size: "

+                    + variables.size() + " expected " + columns.size());

+        }

 

-		for (int i = 0; i < variables.size(); i++) {

-			LogicalVariable var = variables.get(i);

-			ColumnInfo column = columns.get(i);

-			String fieldName = column.getTabAlias() + "."

-					+ column.getInternalName();

-			if (fieldName.indexOf("$$") < 0) {

-				updateVariable(fieldName, var);

-				column.setInternalName(var.toString());

-			}

-		}

-		//printOperatorSchema(operator);

-	}

+        for (int i = 0; i < variables.size(); i++) {

+            LogicalVariable var = variables.get(i);

+            ColumnInfo column = columns.get(i);

+            String fieldName = column.getTabAlias() + "." + column.getInternalName();

+            if (fieldName.indexOf("$$") < 0) {

+                updateVariable(fieldName, var);

+                column.setInternalName(var.toString());

+            }

+        }

+        //printOperatorSchema(operator);

+    }

 

-	/**

-	 * rewrite an expression and substitute variables

-	 * 

-	 * @param expr

-	 *            hive expression

-	 */

-	public void rewriteExpression(ExprNodeDesc expr) {

-		if (expr instanceof ExprNodeColumnDesc) {

-			ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

-			String fieldName = desc.getTabAlias() + "." + desc.getColumn();

-			if (fieldName.indexOf("$$") < 0) {

-				LogicalVariable var = getVariableOnly(fieldName);

-				if (var == null) {

-					fieldName = "." + desc.getColumn();

-					var = getVariableOnly(fieldName);

-					if (var == null) {

-						fieldName = "null." + desc.getColumn();

-						var = getVariableOnly(fieldName);

-						if (var == null) {

-							throw new IllegalStateException(fieldName

-									+ " is wrong!!! ");

-						}

-					}

-				}

-				String name = this.logicalVariableToFieldMap.get(var);

-				var = getVariableOnly(name);

-				desc.setColumn(var.toString());

-			}

-		} else {

-			if (expr.getChildren() != null && expr.getChildren().size() > 0) {

-				List<ExprNodeDesc> children = expr.getChildren();

-				for (ExprNodeDesc desc : children)

-					rewriteExpression(desc);

-			}

-		}

-	}

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpression(ExprNodeDesc expr) {

+        if (expr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();

+            if (fieldName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariableOnly(fieldName);

+                if (var == null) {

+                    fieldName = "." + desc.getColumn();

+                    var = getVariableOnly(fieldName);

+                    if (var == null) {

+                        fieldName = "null." + desc.getColumn();

+                        var = getVariableOnly(fieldName);

+                        if (var == null) {

+                            throw new IllegalStateException(fieldName + " is wrong!!! ");

+                        }

+                    }

+                }

+                String name = this.logicalVariableToFieldMap.get(var);

+                var = getVariableOnly(name);

+                desc.setColumn(var.toString());

+            }

+        } else {

+            if (expr.getChildren() != null && expr.getChildren().size() > 0) {

+                List<ExprNodeDesc> children = expr.getChildren();

+                for (ExprNodeDesc desc : children)

+                    rewriteExpression(desc);

+            }

+        }

+    }
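
rewriteExpression resolves a column through a cascade of lookup keys before failing: "alias.col", then ".col", then "null.col". A self-contained sketch of that fallback chain, with a plain map standing in for the translator's variable tables:

    import java.util.HashMap;
    import java.util.Map;

    public class LookupSketch {
        static String resolve(Map<String, String> vars, String alias, String col) {
            String[] candidates = { alias + "." + col, "." + col, "null." + col };
            for (String key : candidates) {
                String var = vars.get(key);
                if (var != null)
                    return var;    // first key that is mapped wins
            }
            throw new IllegalStateException(alias + "." + col + " cannot be resolved");
        }

        public static void main(String[] args) {
            Map<String, String> vars = new HashMap<>();
            vars.put("null.key", "$$3"); // a column whose table alias was lost
            System.out.println(resolve(vars, "t", "key")); // falls through to null.key
        }
    }
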

 

-	/**

-	 * rewrite an expression and substitute variables

-	 * 

-	 * @param expr

-	 *            hive expression

-	 */

-	public void rewriteExpressionPartial(ExprNodeDesc expr) {

-		if (expr instanceof ExprNodeColumnDesc) {

-			ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

-			String fieldName = desc.getTabAlias() + "." + desc.getColumn();

-			if (fieldName.indexOf("$$") < 0) {

-				LogicalVariable var = getVariableOnly(fieldName);

-				desc.setColumn(var.toString());

-			}

-		} else {

-			if (expr.getChildren() != null && expr.getChildren().size() > 0) {

-				List<ExprNodeDesc> children = expr.getChildren();

-				for (ExprNodeDesc desc : children)

-					rewriteExpressionPartial(desc);

-			}

-		}

-	}

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpressionPartial(ExprNodeDesc expr) {

+        if (expr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();

+            if (fieldName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariableOnly(fieldName);

+                desc.setColumn(var.toString());

+            }

+        } else {

+            if (expr.getChildren() != null && expr.getChildren().size() > 0) {

+                List<ExprNodeDesc> children = expr.getChildren();

+                for (ExprNodeDesc desc : children)

+                    rewriteExpressionPartial(desc);

+            }

+        }

+    }

 

-	// private void printOperatorSchema(Operator operator) {

-	// // System.out.println(operator.getName());

-	// // List<ColumnInfo> columns = operator.getSchema().getSignature();

-	// // for (ColumnInfo column : columns) {

-	// // System.out.print(column.getTabAlias() + "." +

-	// // column.getInternalName() + "  ");

-	// // }

-	// // System.out.println();

-	// }

+    // private void printOperatorSchema(Operator operator) {

+    // // System.out.println(operator.getName());

+    // // List<ColumnInfo> columns = operator.getSchema().getSignature();

+    // // for (ColumnInfo column : columns) {

+    // // System.out.print(column.getTabAlias() + "." +

+    // // column.getInternalName() + "  ");

+    // // }

+    // // System.out.println();

+    // }

 

-	/**

-	 * translate scalar function expression

-	 * 

-	 * @param hiveExpr

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateScalarFucntion(

-			ExprNodeDesc hiveExpr) {

-		ILogicalExpression AlgebricksExpr;

+    /**

+     * translate scalar function expression

+     * 

+     * @param hiveExpr

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc hiveExpr) {

+        ILogicalExpression AlgebricksExpr;

 

-		if (hiveExpr instanceof ExprNodeGenericFuncDesc) {

-			List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

-			List<ExprNodeDesc> children = hiveExpr.getChildren();

+        if (hiveExpr instanceof ExprNodeGenericFuncDesc) {

+            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

+            List<ExprNodeDesc> children = hiveExpr.getChildren();

 

-			for (ExprNodeDesc child : children)

-				arguments.add(translateScalarFucntion(child));

+            for (ExprNodeDesc child : children)

+                arguments.add(translateScalarFucntion(child));

 

-			ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc) hiveExpr;

-			GenericUDF genericUdf = funcExpr.getGenericUDF();

-			UDF udf = null;

-			if (genericUdf instanceof GenericUDFBridge) {

-				GenericUDFBridge bridge = (GenericUDFBridge) genericUdf;

-				try {

-					udf = bridge.getUdfClass().newInstance();

-				} catch (Exception e) {

-					e.printStackTrace();

-				}

-			}

+            ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc) hiveExpr;

+            GenericUDF genericUdf = funcExpr.getGenericUDF();

+            UDF udf = null;

+            if (genericUdf instanceof GenericUDFBridge) {

+                GenericUDFBridge bridge = (GenericUDFBridge) genericUdf;

+                try {

+                    udf = bridge.getUdfClass().newInstance();

+                } catch (Exception e) {

+                    e.printStackTrace();

+                }

+            }

 

-			/**

-			 * set up the hive function

-			 */

-			Object hiveFunction = genericUdf;

-			if (udf != null)

-				hiveFunction = udf;

+            /**

+             * set up the hive function

+             */

+            Object hiveFunction = genericUdf;

+            if (udf != null)

+                hiveFunction = udf;

 

-			FunctionIdentifier funcId = HiveAlgebricksBuiltInFunctionMap.INSTANCE

-					.getAlgebricksFunctionId(hiveFunction.getClass());

-			if (funcId == null) {

-				funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE,

-						hiveFunction.getClass().getName());

-			}

+            FunctionIdentifier funcId = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getAlgebricksFunctionId(hiveFunction

+                    .getClass());

+            if (funcId == null) {

+                funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, hiveFunction.getClass().getName());

+            }

 

-			Object functionInfo = null;

-			if (genericUdf instanceof GenericUDFBridge) {

-				functionInfo = funcExpr;

-			}

+            Object functionInfo = null;

+            if (genericUdf instanceof GenericUDFBridge) {

+                functionInfo = funcExpr;

+            }

 

-			/**

-			 * generate the function call expression

-			 */

-			ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(

-					new HiveFunctionInfo(funcId, functionInfo), arguments);

-			AlgebricksExpr = AlgebricksFuncExpr;

+            /**

+             * generate the function call expression

+             */

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, functionInfo), arguments);

+            AlgebricksExpr = AlgebricksFuncExpr;

 

-		} else if (hiveExpr instanceof ExprNodeColumnDesc) {

-			ExprNodeColumnDesc column = (ExprNodeColumnDesc) hiveExpr;

-			LogicalVariable var = this.getVariable(column.getColumn());

-			AlgebricksExpr = new VariableReferenceExpression(var);

+        } else if (hiveExpr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc column = (ExprNodeColumnDesc) hiveExpr;

+            LogicalVariable var = this.getVariable(column.getColumn());

+            AlgebricksExpr = new VariableReferenceExpression(var);

 

-		} else if (hiveExpr instanceof ExprNodeFieldDesc) {

-			FunctionIdentifier funcId;

-			funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE,

-					ExpressionConstant.FIELDACCESS);

+        } else if (hiveExpr instanceof ExprNodeFieldDesc) {

+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.FIELDACCESS);

 

-			ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(

-					new HiveFunctionInfo(funcId, hiveExpr));

-			AlgebricksExpr = AlgebricksFuncExpr;

-		} else if (hiveExpr instanceof ExprNodeConstantDesc) {

-			ExprNodeConstantDesc hiveConst = (ExprNodeConstantDesc) hiveExpr;

-			Object value = hiveConst.getValue();

-			AlgebricksExpr = new ConstantExpression(

-					new HivesterixConstantValue(value));

-		} else if (hiveExpr instanceof ExprNodeNullDesc) {

-			FunctionIdentifier funcId;

-			funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE,

-					ExpressionConstant.NULL);

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else if (hiveExpr instanceof ExprNodeConstantDesc) {

+            ExprNodeConstantDesc hiveConst = (ExprNodeConstantDesc) hiveExpr;

+            Object value = hiveConst.getValue();

+            AlgebricksExpr = new ConstantExpression(new HivesterixConstantValue(value));

+        } else if (hiveExpr instanceof ExprNodeNullDesc) {

+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.NULL);

 

-			ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(

-					new HiveFunctionInfo(funcId, hiveExpr));

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

 

-			AlgebricksExpr = AlgebricksFuncExpr;

-		} else {

-			throw new IllegalStateException("unknown hive expression");

-		}

-		return new MutableObject<ILogicalExpression>(AlgebricksExpr);

-	}

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else {

+            throw new IllegalStateException("unknown hive expression");

+        }

+        return new MutableObject<ILogicalExpression>(AlgebricksExpr);

+    }
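
translateScalarFucntion is an instanceof dispatch over the expression tree: function calls recurse into their arguments, column references become variable references, and constants pass through. A minimal analogue with hypothetical node classes, not Hive's ExprNodeDesc hierarchy:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    abstract class Node {}

    class Call extends Node {
        String fn; List<Node> args;
        Call(String f, List<Node> a) { fn = f; args = a; }
    }

    class Col extends Node { String name; Col(String n) { name = n; } }

    class Const extends Node { Object value; Const(Object v) { value = v; } }

    public class TranslateSketch {
        static String translate(Node n) {
            if (n instanceof Call) {
                Call c = (Call) n;
                String args = c.args.stream().map(TranslateSketch::translate)
                        .collect(Collectors.joining(", "));
                return c.fn + "(" + args + ")";           // scalar function call
            } else if (n instanceof Col) {
                return "$" + ((Col) n).name;              // variable reference
            } else if (n instanceof Const) {
                return String.valueOf(((Const) n).value); // constant expression
            }
            throw new IllegalStateException("unknown hive expression");
        }

        public static void main(String[] args) {
            Node expr = new Call("and", Arrays.asList(new Col("a"), new Const(true)));
            System.out.println(translate(expr)); // prints: and($a, true)
        }
    }
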

 

-	/**

-	 * translate aggregation function expression

-	 * 

-	 * @param aggregateDesc

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateAggregation(

-			AggregationDesc aggregateDesc) {

+    /**

+     * translate aggregation function expression

+     * 

+     * @param aggregateDesc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc) {

 

-		String UDAFName = aggregateDesc.getGenericUDAFName();

+        String UDAFName = aggregateDesc.getGenericUDAFName();

 

-		List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

-		List<ExprNodeDesc> children = aggregateDesc.getParameters();

+        List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

+        List<ExprNodeDesc> children = aggregateDesc.getParameters();

 

-		for (ExprNodeDesc child : children)

-			arguments.add(translateScalarFucntion(child));

+        for (ExprNodeDesc child : children)

+            arguments.add(translateScalarFucntion(child));

 

-		FunctionIdentifier funcId = new FunctionIdentifier(

-				ExpressionConstant.NAMESPACE, UDAFName + "("

-						+ aggregateDesc.getMode() + ")");

-		HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, aggregateDesc);

-		AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(

-				funcInfo, false, arguments);

-		return new MutableObject<ILogicalExpression>(aggregationExpression);

-	}

+        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDAFName + "("

+                + aggregateDesc.getMode() + ")");

+        HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, aggregateDesc);

+        AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(funcInfo, false,

+                arguments);

+        return new MutableObject<ILogicalExpression>(aggregationExpression);

+    }
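
translateAggregation folds the aggregation mode into the function name itself, so the same UDAF in different modes maps to distinct identifiers. A hypothetical illustration of the naming scheme; the namespace:name rendering below is for illustration only, not FunctionIdentifier's actual toString:

    public class AggIdSketch {
        public static void main(String[] args) {
            String namespace = "hive";     // ExpressionConstant.NAMESPACE
            String udafName = "count";
            String mode = "PARTIAL1";      // illustrative GroupByDesc mode value
            System.out.println(namespace + ":" + udafName + "(" + mode + ")");
            // prints: hive:count(PARTIAL1)
        }
    }
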

 

-	/**

-	 * translate aggregation function expression

-	 * 

-	 * @param aggregator

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateUnnestFunction(

-			UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument) {

+    /**

+     * translate a UDTF (unnest) function expression

+     * 

+     * @param udtfDesc

+     * @param argument

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument) {

 

-		String UDTFName = udtfDesc.getUDTFName();

+        String UDTFName = udtfDesc.getUDTFName();

 

-		FunctionIdentifier funcId = new FunctionIdentifier(

-				ExpressionConstant.NAMESPACE, UDTFName);

-		UnnestingFunctionCallExpression unnestingExpression = new UnnestingFunctionCallExpression(

-				new HiveFunctionInfo(funcId, udtfDesc));

-		unnestingExpression.getArguments().add(argument);

-		return new MutableObject<ILogicalExpression>(unnestingExpression);

-	}

+        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDTFName);

+        UnnestingFunctionCallExpression unnestingExpression = new UnnestingFunctionCallExpression(new HiveFunctionInfo(

+                funcId, udtfDesc));

+        unnestingExpression.getArguments().add(argument);

+        return new MutableObject<ILogicalExpression>(unnestingExpression);

+    }

 

-	/**

-	 * get typeinfo

-	 */

-	@Override

-	public TypeInfo getType(LogicalVariable var) {

-		return variableToType.get(var);

-	}

+    /**

+     * get typeinfo

+     */

+    @Override

+    public TypeInfo getType(LogicalVariable var) {

+        return variableToType.get(var);

+    }

 

-	/**

-	 * get variable from variable name

-	 */

-	@Override

-	public LogicalVariable getVariable(String name) {

-		return nameToLogicalVariableMap.get(name);

-	}

+    /**

+     * get variable from variable name

+     */

+    @Override

+    public LogicalVariable getVariable(String name) {

+        return nameToLogicalVariableMap.get(name);

+    }

 

-	@Override

-	public LogicalVariable getVariableFromFieldName(String fieldName) {

-		return this.getVariableOnly(fieldName);

-	}

+    @Override

+    public LogicalVariable getVariableFromFieldName(String fieldName) {

+        return this.getVariableOnly(fieldName);

+    }

 

-	/**

-	 * set the metadata provider

-	 */

-	@Override

-	public void setMetadataProvider(

-			IMetadataProvider<PartitionDesc, Object> metadata) {

-		this.metaData = metadata;

-	}

+    /**

+     * set the metadata provider

+     */

+    @Override

+    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata) {

+        this.metaData = metadata;

+    }

 

-	/**

-	 * insert ReplicateOperator when necessary

-	 */

-	private void insertReplicateOperator(List<Mutable<ILogicalOperator>> roots) {

-		Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childToParentsMap = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();

-		buildChildToParentsMapping(roots, childToParentsMap);

-		for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : childToParentsMap

-				.entrySet()) {

-			List<Mutable<ILogicalOperator>> pList = entry.getValue();

-			if (pList.size() > 1) {

-				ILogicalOperator rop = new ReplicateOperator(pList.size());

-				Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(

-						rop);

-				Mutable<ILogicalOperator> childRef = entry.getKey();

-				rop.getInputs().add(childRef);

-				for (Mutable<ILogicalOperator> parentRef : pList) {

-					ILogicalOperator parentOp = parentRef.getValue();

-					int index = parentOp.getInputs().indexOf(childRef);

-					parentOp.getInputs().set(index, ropRef);

-				}

-			}

-		}

-	}

+    /**

+     * insert ReplicateOperator when necessary

+     */

+    private void insertReplicateOperator(List<Mutable<ILogicalOperator>> roots) {

+        Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childToParentsMap = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();

+        buildChildToParentsMapping(roots, childToParentsMap);

+        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : childToParentsMap.entrySet()) {

+            List<Mutable<ILogicalOperator>> pList = entry.getValue();

+            if (pList.size() > 1) {

+                ILogicalOperator rop = new ReplicateOperator(pList.size());

+                Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(rop);

+                Mutable<ILogicalOperator> childRef = entry.getKey();

+                rop.getInputs().add(childRef);

+                for (Mutable<ILogicalOperator> parentRef : pList) {

+                    ILogicalOperator parentOp = parentRef.getValue();

+                    int index = parentOp.getInputs().indexOf(childRef);

+                    parentOp.getInputs().set(index, ropRef);

+                }

+            }

+        }

+    }
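
insertReplicateOperator splices a replicate node between a shared child and each of its parents, replacing the child reference in every parent's input list in place. A runnable toy version of that rewiring, with PlanNode as a hypothetical stand-in for the Mutable<ILogicalOperator> wrappers:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class PlanNode {
        final String name;
        final List<PlanNode> inputs = new ArrayList<>();
        PlanNode(String name) { this.name = name; }
    }

    public class ReplicateSketch {
        public static void main(String[] args) {
            PlanNode scan = new PlanNode("scan");
            PlanNode left = new PlanNode("left");
            PlanNode right = new PlanNode("right");
            left.inputs.add(scan);
            right.inputs.add(scan);  // scan now feeds two parents

            List<PlanNode> parents = Arrays.asList(left, right);
            if (parents.size() > 1) {
                PlanNode replicate = new PlanNode("replicate");
                replicate.inputs.add(scan);
                for (PlanNode parent : parents) {
                    int index = parent.inputs.indexOf(scan); // splice in place
                    parent.inputs.set(index, replicate);
                }
            }
            System.out.println(left.inputs.get(0).name + " / " + right.inputs.get(0).name);
            // prints: replicate / replicate
        }
    }
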

 

-	/**

-	 * build the mapping from child to Parents

-	 * 

-	 * @param roots

-	 * @param childToParentsMap

-	 */

-	private void buildChildToParentsMapping(

-			List<Mutable<ILogicalOperator>> roots,

-			Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> map) {

-		for (Mutable<ILogicalOperator> opRef : roots) {

-			List<Mutable<ILogicalOperator>> childRefs = opRef.getValue()

-					.getInputs();

-			for (Mutable<ILogicalOperator> childRef : childRefs) {

-				List<Mutable<ILogicalOperator>> parentList = map.get(childRef);

-				if (parentList == null) {

-					parentList = new ArrayList<Mutable<ILogicalOperator>>();

-					map.put(childRef, parentList);

-				}

-				if (!parentList.contains(opRef))

-					parentList.add(opRef);

-			}

-			buildChildToParentsMapping(childRefs, map);

-		}

-	}

+    /**

+     * build the mapping from child to Parents

+     * 

+     * @param roots

+     * @param childToParentsMap

+     */

+    private void buildChildToParentsMapping(List<Mutable<ILogicalOperator>> roots,

+            Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> map) {

+        for (Mutable<ILogicalOperator> opRef : roots) {

+            List<Mutable<ILogicalOperator>> childRefs = opRef.getValue().getInputs();

+            for (Mutable<ILogicalOperator> childRef : childRefs) {

+                List<Mutable<ILogicalOperator>> parentList = map.get(childRef);

+                if (parentList == null) {

+                    parentList = new ArrayList<Mutable<ILogicalOperator>>();

+                    map.put(childRef, parentList);

+                }

+                if (!parentList.contains(opRef))

+                    parentList.add(opRef);

+            }

+            buildChildToParentsMapping(childRefs, map);

+        }

+    }
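
buildChildToParentsMapping inverts the plan's input edges into a child-to-parents multimap, deduplicating parents that are reached more than once through shared subtrees. The same inversion on a toy graph of strings:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class ParentMapSketch {
        public static void main(String[] args) {
            // parent -> children edges, with plain strings standing in for operators
            Map<String, List<String>> inputs = new TreeMap<>();
            inputs.put("join", Arrays.asList("scanA", "scanB"));
            inputs.put("sink", Arrays.asList("join"));

            // invert every edge, skipping parents already recorded for a child
            Map<String, List<String>> childToParents = new TreeMap<>();
            for (Map.Entry<String, List<String>> e : inputs.entrySet()) {
                for (String child : e.getValue()) {
                    List<String> parents = childToParents
                            .computeIfAbsent(child, k -> new ArrayList<>());
                    if (!parents.contains(e.getKey()))
                        parents.add(e.getKey());
                }
            }
            System.out.println(childToParents);
            // prints: {join=[sink], scanA=[join], scanB=[join]}
        }
    }
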

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
index 494e796..d5801a3 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
@@ -8,29 +8,28 @@
 @SuppressWarnings({ "rawtypes", "unchecked" })

 public class HiveLogicalPlanAndMetaData implements ILogicalPlanAndMetadata {

 

-	IMetadataProvider metadata;

-	ILogicalPlan plan;

+    IMetadataProvider metadata;

+    ILogicalPlan plan;

 

-	public HiveLogicalPlanAndMetaData(ILogicalPlan plan,

-			IMetadataProvider metadata) {

-		this.plan = plan;

-		this.metadata = metadata;

-	}

+    public HiveLogicalPlanAndMetaData(ILogicalPlan plan, IMetadataProvider metadata) {

+        this.plan = plan;

+        this.metadata = metadata;

+    }

 

-	@Override

-	public IMetadataProvider getMetadataProvider() {

-		return metadata;

-	}

+    @Override

+    public IMetadataProvider getMetadataProvider() {

+        return metadata;

+    }

 

-	@Override

-	public ILogicalPlan getPlan() {

-		return plan;

-	}

+    @Override

+    public ILogicalPlan getPlan() {

+        return plan;

+    }

 

-	@Override

-	public AlgebricksPartitionConstraint getClusterLocations() {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public AlgebricksPartitionConstraint getClusterLocations() {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
index 0d234fb..0ea4e01 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
@@ -2,7 +2,7 @@
 
 public class HiveOperatorAnnotations {
 
-	// hints
-	public static final String LOCAL_GROUP_BY = "LOCAL_GROUP_BY";
+    // hints
+    public static final String LOCAL_GROUP_BY = "LOCAL_GROUP_BY";
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
index 9a84164..0266a60 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
@@ -13,16 +13,14 @@
 

 public class ExtractVisitor extends DefaultVisitor {

 

-	@Override

-	public Mutable<ILogicalOperator> visit(ExtractOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		Schema currentSchema = t.generateInputSchema(operator

-				.getParentOperators().get(0));

-		operator.setSchema(operator.getParentOperators().get(0).getSchema());

-		List<LogicalVariable> latestOutputSchema = t

-				.getVariablesFromSchema(currentSchema);

-		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(ExtractOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return null;

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
index b276ba9..9541da8 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
@@ -18,27 +18,24 @@
 

 public class FilterVisitor extends DefaultVisitor {

 

-	@Override

-	public Mutable<ILogicalOperator> visit(FilterOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		Schema currentSchema = t.generateInputSchema(operator

-				.getParentOperators().get(0));

+    @Override

+    public Mutable<ILogicalOperator> visit(FilterOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

 

-		FilterDesc desc = (FilterDesc) operator.getConf();

-		ExprNodeDesc predicate = desc.getPredicate();

-		t.rewriteExpression(predicate);

+        FilterDesc desc = (FilterDesc) operator.getConf();

+        ExprNodeDesc predicate = desc.getPredicate();

+        t.rewriteExpression(predicate);

 

-		Mutable<ILogicalExpression> exprs = t.translateScalarFucntion(desc

-				.getPredicate());

-		ILogicalOperator currentOperator = new SelectOperator(exprs);

-		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        Mutable<ILogicalExpression> exprs = t.translateScalarFucntion(desc.getPredicate());

+        ILogicalOperator currentOperator = new SelectOperator(exprs);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

 

-		// populate the schema from upstream operator

-		operator.setSchema(operator.getParentOperators().get(0).getSchema());

-		List<LogicalVariable> latestOutputSchema = t

-				.getVariablesFromSchema(currentSchema);

-		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        // populate the schema from upstream operator

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
index d2180a3..8e7bfc6 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
@@ -47,245 +47,218 @@
 @SuppressWarnings({ "rawtypes", "unchecked" })

 public class GroupByVisitor extends DefaultVisitor {

 

-	private List<Mutable<ILogicalExpression>> AlgebricksAggs = new ArrayList<Mutable<ILogicalExpression>>();

-	private List<IFunctionInfo> localAggs = new ArrayList<IFunctionInfo>();

-	private boolean isDistinct = false;

-	private boolean gbyKeyNotRedKey = false;

+    private List<Mutable<ILogicalExpression>> AlgebricksAggs = new ArrayList<Mutable<ILogicalExpression>>();

+    private List<IFunctionInfo> localAggs = new ArrayList<IFunctionInfo>();

+    private boolean isDistinct = false;

+    private boolean gbyKeyNotRedKey = false;

 

-	@Override

-	public Mutable<ILogicalOperator> visit(GroupByOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException {

+    @Override

+    public Mutable<ILogicalOperator> visit(GroupByOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

 

-		// get descriptors

-		GroupByDesc desc = (GroupByDesc) operator.getConf();

-		GroupByDesc.Mode mode = desc.getMode();

+        // get descriptors

+        GroupByDesc desc = (GroupByDesc) operator.getConf();

+        GroupByDesc.Mode mode = desc.getMode();

 

-		List<ExprNodeDesc> keys = desc.getKeys();

-		List<AggregationDesc> aggregators = desc.getAggregators();

+        List<ExprNodeDesc> keys = desc.getKeys();

+        List<AggregationDesc> aggregators = desc.getAggregators();

 

-		Operator child = operator.getChildOperators().get(0);

+        Operator child = operator.getChildOperators().get(0);

 

-		if (child.getType() == OperatorType.REDUCESINK) {

-			List<ExprNodeDesc> partKeys = ((ReduceSinkDesc) child.getConf())

-					.getPartitionCols();

-			if (keys.size() != partKeys.size())

-				gbyKeyNotRedKey = true;

-		}

+        if (child.getType() == OperatorType.REDUCESINK) {

+            List<ExprNodeDesc> partKeys = ((ReduceSinkDesc) child.getConf()).getPartitionCols();

+            if (keys.size() != partKeys.size())

+                gbyKeyNotRedKey = true;

+        }

 

-		if (mode == GroupByDesc.Mode.PARTIAL1 || mode == GroupByDesc.Mode.HASH

-				|| mode == GroupByDesc.Mode.COMPLETE

-				|| (aggregators.size() == 0 && isDistinct == false)

-				|| gbyKeyNotRedKey) {

-			AlgebricksAggs.clear();

-			// add an assign operator if the key is not a column expression

-			ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

-			ILogicalOperator currentOperator = null;

-			ILogicalOperator assignOperator = t.getAssignOperator(

-					AlgebricksParentOperatorRef, keys, keyVariables);

-			if (assignOperator != null) {

-				currentOperator = assignOperator;

-				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-						currentOperator);

-			}

+        if (mode == GroupByDesc.Mode.PARTIAL1 || mode == GroupByDesc.Mode.HASH || mode == GroupByDesc.Mode.COMPLETE

+                || (aggregators.size() == 0 && isDistinct == false) || gbyKeyNotRedKey) {

+            AlgebricksAggs.clear();

+            // add an assign operator if the key is not a column expression

+            ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+            ILogicalOperator currentOperator = null;

+            ILogicalOperator assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

 

-			// get key variable expression list

-			List<Mutable<ILogicalExpression>> keyExprs = new ArrayList<Mutable<ILogicalExpression>>();

-			for (LogicalVariable var : keyVariables) {

-				keyExprs.add(t.translateScalarFucntion(new ExprNodeColumnDesc(

-						TypeInfoFactory.intTypeInfo, var.toString(), "", false)));

-			}

+            // get key variable expression list

+            List<Mutable<ILogicalExpression>> keyExprs = new ArrayList<Mutable<ILogicalExpression>>();

+            for (LogicalVariable var : keyVariables) {

+                keyExprs.add(t.translateScalarFucntion(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, var

+                        .toString(), "", false)));

+            }

 

-			if (aggregators.size() == 0) {

-				List<Mutable<ILogicalExpression>> distinctExprs = new ArrayList<Mutable<ILogicalExpression>>();

-				for (LogicalVariable var : keyVariables) {

-					Mutable<ILogicalExpression> varExpr = new MutableObject<ILogicalExpression>(

-							new VariableReferenceExpression(var));

-					distinctExprs.add(varExpr);

-				}

-				t.rewriteOperatorOutputSchema(keyVariables, operator);

-				isDistinct = true;

-				ILogicalOperator lop = new DistinctOperator(distinctExprs);

-				lop.getInputs().add(AlgebricksParentOperatorRef);

-				return new MutableObject<ILogicalOperator>(lop);

-			}

+            if (aggregators.size() == 0) {

+                List<Mutable<ILogicalExpression>> distinctExprs = new ArrayList<Mutable<ILogicalExpression>>();

+                for (LogicalVariable var : keyVariables) {

+                    Mutable<ILogicalExpression> varExpr = new MutableObject<ILogicalExpression>(

+                            new VariableReferenceExpression(var));

+                    distinctExprs.add(varExpr);

+                }

+                t.rewriteOperatorOutputSchema(keyVariables, operator);

+                isDistinct = true;

+                ILogicalOperator lop = new DistinctOperator(distinctExprs);

+                lop.getInputs().add(AlgebricksParentOperatorRef);

+                return new MutableObject<ILogicalOperator>(lop);

+            }

 

-			// get the pair<LogicalVariable, ILogicalExpression> list

-			List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyParameters = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();

-			keyVariables.clear();

-			for (Mutable<ILogicalExpression> expr : keyExprs) {

-				LogicalVariable keyVar = t.getVariable(expr.getValue()

-						.toString(), TypeInfoFactory.unknownTypeInfo);

-				keyParameters.add(new Pair(keyVar, expr));

-				keyVariables.add(keyVar);

-			}

+            // get the pair<LogicalVariable, ILogicalExpression> list

+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyParameters = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();

+            keyVariables.clear();

+            for (Mutable<ILogicalExpression> expr : keyExprs) {

+                LogicalVariable keyVar = t.getVariable(expr.getValue().toString(), TypeInfoFactory.unknownTypeInfo);

+                keyParameters.add(new Pair(keyVar, expr));

+                keyVariables.add(keyVar);

+            }

 

-			// get the parameters for the aggregator operator

-			ArrayList<LogicalVariable> aggVariables = new ArrayList<LogicalVariable>();

-			ArrayList<Mutable<ILogicalExpression>> aggExprs = new ArrayList<Mutable<ILogicalExpression>>();

+            // get the parameters for the aggregator operator

+            ArrayList<LogicalVariable> aggVariables = new ArrayList<LogicalVariable>();

+            ArrayList<Mutable<ILogicalExpression>> aggExprs = new ArrayList<Mutable<ILogicalExpression>>();

 

-			// get the type of each aggregation function

-			HashMap<AggregationDesc, TypeInfo> aggToType = new HashMap<AggregationDesc, TypeInfo>();

-			List<ColumnInfo> columns = operator.getSchema().getSignature();

-			int offset = keys.size();

-			for (int i = offset; i < columns.size(); i++) {

-				aggToType.put(aggregators.get(i - offset), columns.get(i)

-						.getType());

-			}

+            // get the type of each aggregation function

+            HashMap<AggregationDesc, TypeInfo> aggToType = new HashMap<AggregationDesc, TypeInfo>();

+            List<ColumnInfo> columns = operator.getSchema().getSignature();

+            int offset = keys.size();

+            for (int i = offset; i < columns.size(); i++) {

+                aggToType.put(aggregators.get(i - offset), columns.get(i).getType());

+            }

 

-			localAggs.clear();

-			// rewrite parameter expressions for all aggregators

-			for (AggregationDesc aggregator : aggregators) {

-				for (ExprNodeDesc parameter : aggregator.getParameters()) {

-					t.rewriteExpression(parameter);

-				}

-				Mutable<ILogicalExpression> aggExpr = t

-						.translateAggregation(aggregator);

-				AbstractFunctionCallExpression localAggExpr = (AbstractFunctionCallExpression) aggExpr

-						.getValue();

-				localAggs.add(localAggExpr.getFunctionInfo());

+            localAggs.clear();

+            // rewrite parameter expressions for all aggregators

+            for (AggregationDesc aggregator : aggregators) {

+                for (ExprNodeDesc parameter : aggregator.getParameters()) {

+                    t.rewriteExpression(parameter);

+                }

+                Mutable<ILogicalExpression> aggExpr = t.translateAggregation(aggregator);

+                AbstractFunctionCallExpression localAggExpr = (AbstractFunctionCallExpression) aggExpr.getValue();

+                localAggs.add(localAggExpr.getFunctionInfo());

 

-				AggregationDesc logicalAgg = new AggregationDesc(

-						aggregator.getGenericUDAFName(),

-						aggregator.getGenericUDAFEvaluator(),

-						aggregator.getParameters(), aggregator.getDistinct(),

-						Mode.COMPLETE);

-				Mutable<ILogicalExpression> logicalAggExpr = t

-						.translateAggregation(logicalAgg);

+                AggregationDesc logicalAgg = new AggregationDesc(aggregator.getGenericUDAFName(),

+                        aggregator.getGenericUDAFEvaluator(), aggregator.getParameters(), aggregator.getDistinct(),

+                        Mode.COMPLETE);

+                Mutable<ILogicalExpression> logicalAggExpr = t.translateAggregation(logicalAgg);

 

-				AlgebricksAggs.add(logicalAggExpr);

-				if (!gbyKeyNotRedKey)

-					aggExprs.add(logicalAggExpr);

-				else

-					aggExprs.add(aggExpr);

+                AlgebricksAggs.add(logicalAggExpr);

+                if (!gbyKeyNotRedKey)

+                    aggExprs.add(logicalAggExpr);

+                else

+                    aggExprs.add(aggExpr);

 

-				aggVariables.add(t.getVariable(aggregator.getExprString()

-						+ aggregator.getMode(), aggToType.get(aggregator)));

-			}

+                aggVariables.add(t.getVariable(aggregator.getExprString() + aggregator.getMode(),

+                        aggToType.get(aggregator)));

+            }

 

-			if (child.getType() != OperatorType.REDUCESINK)

-				gbyKeyNotRedKey = false;

+            if (child.getType() != OperatorType.REDUCESINK)

+                gbyKeyNotRedKey = false;

 

-			// get the sub plan list

-			AggregateOperator aggOperator = new AggregateOperator(aggVariables,

-					aggExprs);

-			NestedTupleSourceOperator nestedTupleSource = new NestedTupleSourceOperator(

-					new MutableObject<ILogicalOperator>());

-			aggOperator.getInputs().add(

-					new MutableObject<ILogicalOperator>(nestedTupleSource));

+            // get the sub plan list

+            AggregateOperator aggOperator = new AggregateOperator(aggVariables, aggExprs);

+            NestedTupleSourceOperator nestedTupleSource = new NestedTupleSourceOperator(

+                    new MutableObject<ILogicalOperator>());

+            aggOperator.getInputs().add(new MutableObject<ILogicalOperator>(nestedTupleSource));

 

-			List<Mutable<ILogicalOperator>> subRoots = new ArrayList<Mutable<ILogicalOperator>>();

-			subRoots.add(new MutableObject<ILogicalOperator>(aggOperator));

-			ILogicalPlan subPlan = new ALogicalPlanImpl(subRoots);

-			List<ILogicalPlan> subPlans = new ArrayList<ILogicalPlan>();

-			subPlans.add(subPlan);

+            List<Mutable<ILogicalOperator>> subRoots = new ArrayList<Mutable<ILogicalOperator>>();

+            subRoots.add(new MutableObject<ILogicalOperator>(aggOperator));

+            ILogicalPlan subPlan = new ALogicalPlanImpl(subRoots);

+            List<ILogicalPlan> subPlans = new ArrayList<ILogicalPlan>();

+            subPlans.add(subPlan);

 

-			// create the group by operator

-			currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator(

-					keyParameters,

-					new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(),

-					subPlans);

-			currentOperator.getInputs().add(AlgebricksParentOperatorRef);

-			nestedTupleSource.getDataSourceReference()

-					.setValue(currentOperator);

+            // create the group by operator

+            currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator(

+                    keyParameters, new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(), subPlans);

+            currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+            nestedTupleSource.getDataSourceReference().setValue(currentOperator);

 

-			List<LogicalVariable> outputVariables = new ArrayList<LogicalVariable>();

-			outputVariables.addAll(keyVariables);

-			outputVariables.addAll(aggVariables);

-			t.rewriteOperatorOutputSchema(outputVariables, operator);

+            List<LogicalVariable> outputVariables = new ArrayList<LogicalVariable>();

+            outputVariables.addAll(keyVariables);

+            outputVariables.addAll(aggVariables);

+            t.rewriteOperatorOutputSchema(outputVariables, operator);

 

-			if (gbyKeyNotRedKey) {

-				currentOperator.getAnnotations().put(

-						HiveOperatorAnnotations.LOCAL_GROUP_BY, Boolean.TRUE);

-			}

+            if (gbyKeyNotRedKey) {

+                currentOperator.getAnnotations().put(HiveOperatorAnnotations.LOCAL_GROUP_BY, Boolean.TRUE);

+            }

 

-			HiveConf conf = ConfUtil.getHiveConf();

-			Boolean extGby = conf.getBoolean(

-					"hive.algebricks.groupby.external", false);

+            HiveConf conf = ConfUtil.getHiveConf();

+            Boolean extGby = conf.getBoolean("hive.algebricks.groupby.external", false);

 

-			if (extGby && isSerializable(aggregators)) {

-				currentOperator.getAnnotations()

-						.put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY,

-								Boolean.TRUE);

-			}

-			return new MutableObject<ILogicalOperator>(currentOperator);

-		} else {

-			isDistinct = false;

-			// rewrite parameter expressions for all aggregators

-			int i = 0;

-			for (AggregationDesc aggregator : aggregators) {

-				for (ExprNodeDesc parameter : aggregator.getParameters()) {

-					t.rewriteExpression(parameter);

-				}

-				Mutable<ILogicalExpression> agg = t

-						.translateAggregation(aggregator);

-				AggregateFunctionCallExpression originalAgg = (AggregateFunctionCallExpression) AlgebricksAggs

-						.get(i).getValue();

-				originalAgg.setStepOneAggregate(localAggs.get(i));

-				AggregateFunctionCallExpression currentAgg = (AggregateFunctionCallExpression) agg

-						.getValue();

-				if (currentAgg.getFunctionInfo() != null) {

-					originalAgg.setTwoStep(true);

-					originalAgg.setStepTwoAggregate(currentAgg

-							.getFunctionInfo());

-				}

-				i++;

-			}

-			return null;

-		}

-	}

+            if (extGby && isSerializable(aggregators)) {

+                currentOperator.getAnnotations().put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY, Boolean.TRUE);

+            }

+            return new MutableObject<ILogicalOperator>(currentOperator);

+        } else {

+            isDistinct = false;

+            // rewrite parameter expressions for all aggregators

+            int i = 0;

+            for (AggregationDesc aggregator : aggregators) {

+                for (ExprNodeDesc parameter : aggregator.getParameters()) {

+                    t.rewriteExpression(parameter);

+                }

+                Mutable<ILogicalExpression> agg = t.translateAggregation(aggregator);

+                AggregateFunctionCallExpression originalAgg = (AggregateFunctionCallExpression) AlgebricksAggs.get(i)

+                        .getValue();

+                originalAgg.setStepOneAggregate(localAggs.get(i));

+                AggregateFunctionCallExpression currentAgg = (AggregateFunctionCallExpression) agg.getValue();

+                if (currentAgg.getFunctionInfo() != null) {

+                    originalAgg.setTwoStep(true);

+                    originalAgg.setStepTwoAggregate(currentAgg.getFunctionInfo());

+                }

+                i++;

+            }

+            return null;

+        }

+    }
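
The else branch above wires each Algebricks aggregate as a two-step function: the map-side (step-one) UDAF produces partial states and the reduce-side (step-two) UDAF combines them. A minimal, self-contained illustration of that split in plain Java — COUNT locally, then SUM over the partial counts; class and variable names are illustrative, not Hivesterix API:

import java.util.Arrays;
import java.util.List;

public final class TwoStepCountDemo {
    public static void main(String[] args) {
        // two "map-side" partitions of input rows
        List<List<String>> partitions = Arrays.asList(
                Arrays.asList("a", "b", "c"),
                Arrays.asList("d", "e"));

        // step one (local): COUNT within each partition
        long[] partialCounts = partitions.stream()
                .mapToLong(List::size)
                .toArray();

        // step two (global): SUM over the partial counts
        long total = Arrays.stream(partialCounts).sum();

        System.out.println(total); // prints 5
    }
}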

 

-	@Override

-	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		Operator downStream = (Operator) operator.getChildOperators().get(0);

-		if (!(downStream instanceof GroupByOperator)) {

-			return null;

-		}

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Operator downStream = (Operator) operator.getChildOperators().get(0);

+        if (!(downStream instanceof GroupByOperator)) {

+            return null;

+        }

 

-		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

-		List<ExprNodeDesc> keys = desc.getKeyCols();

-		List<ExprNodeDesc> values = desc.getValueCols();

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        List<ExprNodeDesc> values = desc.getValueCols();

 

-		// insert assign for keys

-		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

-		t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+        // insert assign for keys

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

 

-		// insert assign for values

-		ArrayList<LogicalVariable> valueVariables = new ArrayList<LogicalVariable>();

-		t.getAssignOperator(AlgebricksParentOperatorRef, values, valueVariables);

+        // insert assign for values

+        ArrayList<LogicalVariable> valueVariables = new ArrayList<LogicalVariable>();

+        t.getAssignOperator(AlgebricksParentOperatorRef, values, valueVariables);

 

-		ArrayList<LogicalVariable> columns = new ArrayList<LogicalVariable>();

-		columns.addAll(keyVariables);

-		columns.addAll(valueVariables);

+        ArrayList<LogicalVariable> columns = new ArrayList<LogicalVariable>();

+        columns.addAll(keyVariables);

+        columns.addAll(valueVariables);

 

-		t.rewriteOperatorOutputSchema(columns, operator);

-		return null;

-	}

+        t.rewriteOperatorOutputSchema(columns, operator);

+        return null;

+    }

 

-	private boolean isSerializable(List<AggregationDesc> descs)

-			throws AlgebricksException {

-		try {

-			for (AggregationDesc desc : descs) {

-				GenericUDAFEvaluator udaf = desc.getGenericUDAFEvaluator();

-				AggregationBuffer buf = udaf.getNewAggregationBuffer();

-				Class<?> bufferClass = buf.getClass();

-				Field[] fields = bufferClass.getDeclaredFields();

-				for (Field field : fields) {

-					field.setAccessible(true);

-					String type = field.getType().toString();

-					if (!(type.equals("int") || type.equals("long")

-							|| type.equals("float") || type.equals("double") || type

-								.equals("boolean"))) {

-						return false;

-					}

-				}

+    private boolean isSerializable(List<AggregationDesc> descs) throws AlgebricksException {

+        try {

+            for (AggregationDesc desc : descs) {

+                GenericUDAFEvaluator udaf = desc.getGenericUDAFEvaluator();

+                AggregationBuffer buf = udaf.getNewAggregationBuffer();

+                Class<?> bufferClass = buf.getClass();

+                Field[] fields = bufferClass.getDeclaredFields();

+                for (Field field : fields) {

+                    field.setAccessible(true);

+                    String type = field.getType().toString();

+                    if (!(type.equals("int") || type.equals("long") || type.equals("float") || type.equals("double") || type

+                            .equals("boolean"))) {

+                        return false;

+                    }

+                }

 

-			}

-			return true;

-		} catch (Exception e) {

-			throw new AlgebricksException(e);

-		}

-	}

+            }

+            return true;

+        } catch (Exception e) {

+            throw new AlgebricksException(e);

+        }

+    }

 

 }
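
The isSerializable guard above enables the external (spillable) group-by only when every UDAF aggregation buffer consists solely of primitive fields, which it probes via reflection. A minimal standalone sketch of the same check, assuming only the JDK; the class and method names are illustrative:

import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.List;

public final class PrimitiveBufferCheck {

    // the same whitelist the visitor uses: a buffer qualifies iff all of
    // its declared fields are Java primitives from this list
    private static final List<String> PRIMITIVES =
            Arrays.asList("int", "long", "float", "double", "boolean");

    public static boolean hasOnlyPrimitiveFields(Object buffer) {
        for (Field field : buffer.getClass().getDeclaredFields()) {
            field.setAccessible(true);
            // Class.toString() yields "int", "long", ... for primitive types
            if (!PRIMITIVES.contains(field.getType().toString())) {
                return false;
            }
        }
        return true;
    }
}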

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
index aea4be5..ef346bc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
@@ -36,410 +36,382 @@
 @SuppressWarnings("rawtypes")

 public class JoinVisitor extends DefaultVisitor {

 

-	/**

-	 * reduce sink operator to variables

-	 */

-	private HashMap<Operator, List<LogicalVariable>> reduceSinkToKeyVariables = new HashMap<Operator, List<LogicalVariable>>();

+    /**

+     * reduce sink operator to key variables

+     */

+    private HashMap<Operator, List<LogicalVariable>> reduceSinkToKeyVariables = new HashMap<Operator, List<LogicalVariable>>();

 

-	/**

-	 * reduce sink operator to variables

-	 */

-	private HashMap<Operator, List<String>> reduceSinkToFieldNames = new HashMap<Operator, List<String>>();

+    /**

+     * reduce sink operator to field names

+     */

+    private HashMap<Operator, List<String>> reduceSinkToFieldNames = new HashMap<Operator, List<String>>();

 

-	/**

-	 * reduce sink operator to variables

-	 */

-	private HashMap<Operator, List<TypeInfo>> reduceSinkToTypes = new HashMap<Operator, List<TypeInfo>>();

+    /**

+     * reduce sink operator to types

+     */

+    private HashMap<Operator, List<TypeInfo>> reduceSinkToTypes = new HashMap<Operator, List<TypeInfo>>();

 

-	/**

-	 * map a join operator (in hive) to its parent operators (in hive)

-	 */

-	private HashMap<Operator, List<Operator>> operatorToHiveParents = new HashMap<Operator, List<Operator>>();

+    /**

+     * map a join operator (in hive) to its parent operators (in hive)

+     */

+    private HashMap<Operator, List<Operator>> operatorToHiveParents = new HashMap<Operator, List<Operator>>();

 

-	/**

-	 * map a join operator (in hive) to its parent operators (in asterix)

-	 */

-	private HashMap<Operator, List<ILogicalOperator>> operatorToAsterixParents = new HashMap<Operator, List<ILogicalOperator>>();

+    /**

+     * map a join operator (in hive) to its parent operators (in asterix)

+     */

+    private HashMap<Operator, List<ILogicalOperator>> operatorToAsterixParents = new HashMap<Operator, List<ILogicalOperator>>();

 

-	/**

-	 * the latest traversed reduce sink operator

-	 */

-	private Operator latestReduceSink = null;

+    /**

+     * the latest traversed reduce sink operator

+     */

+    private Operator latestReduceSink = null;

 

-	/**

-	 * the latest generated parent for join

-	 */

-	private ILogicalOperator latestAlgebricksOperator = null;

+    /**

+     * the latest generated parent for join

+     */

+    private ILogicalOperator latestAlgebricksOperator = null;

 

-	/**

-	 * process a join operator

-	 */

-	@Override

-	public Mutable<ILogicalOperator> visit(JoinOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

-		latestAlgebricksOperator = AlgebricksParentOperator.getValue();

-		translateJoinOperatorPreprocess(operator, t);

-		List<Operator> parents = operatorToHiveParents.get(operator);

-		if (parents.size() < operator.getParentOperators().size()) {

-			return null;

-		} else {

-			ILogicalOperator joinOp = translateJoinOperator(operator,

-					AlgebricksParentOperator, t);

-			// clearStatus();

-			return new MutableObject<ILogicalOperator>(joinOp);

-		}

-	}

+    /**

+     * process a join operator

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(JoinOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) {

+        latestAlgebricksOperator = AlgebricksParentOperator.getValue();

+        translateJoinOperatorPreprocess(operator, t);

+        List<Operator> parents = operatorToHiveParents.get(operator);

+        if (parents.size() < operator.getParentOperators().size()) {

+            return null;

+        } else {

+            ILogicalOperator joinOp = translateJoinOperator(operator, AlgebricksParentOperator, t);

+            // clearStatus();

+            return new MutableObject<ILogicalOperator>(joinOp);

+        }

+    }

 

-	private void reorder(Byte[] order, List<ILogicalOperator> parents,

-			List<Operator> hiveParents) {

-		ILogicalOperator[] lops = new ILogicalOperator[parents.size()];

-		Operator[] ops = new Operator[hiveParents.size()];

+    private void reorder(Byte[] order, List<ILogicalOperator> parents, List<Operator> hiveParents) {

+        ILogicalOperator[] lops = new ILogicalOperator[parents.size()];

+        Operator[] ops = new Operator[hiveParents.size()];

 

-		for (Operator op : hiveParents) {

-			ReduceSinkOperator rop = (ReduceSinkOperator) op;

-			ReduceSinkDesc rdesc = rop.getConf();

-			int tag = rdesc.getTag();

+        for (Operator op : hiveParents) {

+            ReduceSinkOperator rop = (ReduceSinkOperator) op;

+            ReduceSinkDesc rdesc = rop.getConf();

+            int tag = rdesc.getTag();

 

-			int index = -1;

-			for (int i = 0; i < order.length; i++)

-				if (order[i] == tag) {

-					index = i;

-					break;

-				}

-			lops[index] = parents.get(hiveParents.indexOf(op));

-			ops[index] = op;

-		}

+            int index = -1;

+            for (int i = 0; i < order.length; i++)

+                if (order[i] == tag) {

+                    index = i;

+                    break;

+                }

+            lops[index] = parents.get(hiveParents.indexOf(op));

+            ops[index] = op;

+        }

 

-		parents.clear();

-		hiveParents.clear();

+        parents.clear();

+        hiveParents.clear();

 

-		for (int i = 0; i < lops.length; i++) {

-			parents.add(lops[i]);

-			hiveParents.add(ops[i]);

-		}

-	}

+        for (int i = 0; i < lops.length; i++) {

+            parents.add(lops[i]);

+            hiveParents.add(ops[i]);

+        }

+    }
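
reorder permutes the join inputs from traversal order into the tag order recorded in the join descriptor: each parent's reduce-sink tag is looked up in the order array and the parent is placed at that slot. A small self-contained sketch of the permutation with illustrative data (plain Java, not the Hive operator types):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public final class TagReorderDemo {
    public static void main(String[] args) {
        Byte[] tagOrder = { 1, 0 };     // desired input order, by tag
        List<String> parents = new ArrayList<String>(Arrays.asList("src0", "src1"));
        int[] tags = { 0, 1 };          // tag of each parent, in arrival order

        String[] reordered = new String[parents.size()];
        for (int p = 0; p < parents.size(); p++) {
            // find the slot whose tag matches this parent's tag
            for (int i = 0; i < tagOrder.length; i++) {
                if (tagOrder[i] == tags[p]) {
                    reordered[i] = parents.get(p);
                    break;
                }
            }
        }
        System.out.println(Arrays.toString(reordered)); // prints [src1, src0]
    }
}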

 

-	/**

-	 * translate a hive join operator to asterix join operator->assign

-	 * operator->project operator

-	 * 

-	 * @param parentOperator

-	 * @param operator

-	 * @return

-	 */

-	private ILogicalOperator translateJoinOperator(Operator operator,

-			Mutable<ILogicalOperator> parentOperator, Translator t) {

+    /**

+     * translate a hive join operator to asterix join operator->assign

+     * operator->project operator

+     * 

+     * @param parentOperator

+     * @param operator

+     * @return

+     */

+    private ILogicalOperator translateJoinOperator(Operator operator, Mutable<ILogicalOperator> parentOperator,

+            Translator t) {

 

-		JoinDesc joinDesc = (JoinDesc) operator.getConf();

+        JoinDesc joinDesc = (JoinDesc) operator.getConf();

 

-		// get the projection expression (already re-written) from each source

-		// table

-		Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

-		reorder(joinDesc.getTagOrder(), operatorToAsterixParents.get(operator),

-				operatorToHiveParents.get(operator));

+        // get the projection expression (already re-written) from each source

+        // table

+        Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+        reorder(joinDesc.getTagOrder(), operatorToAsterixParents.get(operator), operatorToHiveParents.get(operator));

 

-		// make an reduce join operator

-		ILogicalOperator currentOperator = generateJoinTree(

-				joinDesc.getCondsList(),

-				operatorToAsterixParents.get(operator),

-				operatorToHiveParents.get(operator), 0, t);

-		parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        // make a reduce join operator

+        ILogicalOperator currentOperator = generateJoinTree(joinDesc.getCondsList(),

+                operatorToAsterixParents.get(operator), operatorToHiveParents.get(operator), 0, t);

+        parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

 

-		// add assign and project operator on top of a join

-		// output variables

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

-		Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

-		Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

-		while (iterator.hasNext()) {

-			List<ExprNodeDesc> outputExprs = iterator.next().getValue();

-			ILogicalOperator assignOperator = t.getAssignOperator(

-					parentOperator, outputExprs, variables);

+        // add assign and project operator on top of a join

+        // output variables

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+        while (iterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+            ILogicalOperator assignOperator = t.getAssignOperator(parentOperator, outputExprs, variables);

 

-			if (assignOperator != null) {

-				currentOperator = assignOperator;

-				parentOperator = new MutableObject<ILogicalOperator>(

-						currentOperator);

-			}

-		}

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+        }

 

-		ILogicalOperator po = new ProjectOperator(variables);

-		po.getInputs().add(parentOperator);

-		t.rewriteOperatorOutputSchema(variables, operator);

-		return po;

-	}

+        ILogicalOperator po = new ProjectOperator(variables);

+        po.getInputs().add(parentOperator);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return po;

+    }

 

-	/**

-	 * deal with reduce sink operator for the case of join

-	 */

-	@Override

-	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

-			Mutable<ILogicalOperator> parentOperator, Translator t) {

+    /**

+     * deal with reduce sink operator for the case of join

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator, Mutable<ILogicalOperator> parentOperator,

+            Translator t) {

 

-		Operator downStream = (Operator) operator.getChildOperators().get(0);

-		if (!(downStream instanceof JoinOperator))

-			return null;

+        Operator downStream = (Operator) operator.getChildOperators().get(0);

+        if (!(downStream instanceof JoinOperator))

+            return null;

 

-		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

-		List<ExprNodeDesc> keys = desc.getKeyCols();

-		List<ExprNodeDesc> values = desc.getValueCols();

-		List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        List<ExprNodeDesc> values = desc.getValueCols();

+        List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

 

-		/**

-		 * rewrite key, value, paritioncol expressions

-		 */

-		for (ExprNodeDesc key : keys)

-			t.rewriteExpression(key);

-		for (ExprNodeDesc value : values)

-			t.rewriteExpression(value);

-		for (ExprNodeDesc col : partitionCols)

-			t.rewriteExpression(col);

+        /**

+         * rewrite key, value, partition column expressions

+         */

+        for (ExprNodeDesc key : keys)

+            t.rewriteExpression(key);

+        for (ExprNodeDesc value : values)

+            t.rewriteExpression(value);

+        for (ExprNodeDesc col : partitionCols)

+            t.rewriteExpression(col);

 

-		ILogicalOperator currentOperator = null;

+        ILogicalOperator currentOperator = null;

 

-		// add assign operator for keys if necessary

-		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

-		ILogicalOperator assignOperator = t.getAssignOperator(parentOperator,

-				keys, keyVariables);

-		if (assignOperator != null) {

-			currentOperator = assignOperator;

-			parentOperator = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

+        // add assign operator for keys if necessary

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        ILogicalOperator assignOperator = t.getAssignOperator(parentOperator, keys, keyVariables);

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

 

-		// add assign operator for values if necessary

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

-		assignOperator = t.getAssignOperator(parentOperator, values, variables);

-		if (assignOperator != null) {

-			currentOperator = assignOperator;

-			parentOperator = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

+        // add assign operator for values if necessary

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        assignOperator = t.getAssignOperator(parentOperator, values, variables);

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

 

-		// unified schema: key, value

-		ArrayList<LogicalVariable> unifiedKeyValues = new ArrayList<LogicalVariable>();

-		unifiedKeyValues.addAll(keyVariables);

-		for (LogicalVariable value : variables)

-			if (keyVariables.indexOf(value) < 0)

-				unifiedKeyValues.add(value);

+        // unified schema: key, value

+        ArrayList<LogicalVariable> unifiedKeyValues = new ArrayList<LogicalVariable>();

+        unifiedKeyValues.addAll(keyVariables);

+        for (LogicalVariable value : variables)

+            if (keyVariables.indexOf(value) < 0)

+                unifiedKeyValues.add(value);

 

-		// insert projection operator, it is a *must*,

-		// in hive, reduce sink sometimes also do the projection operator's

-		// task

-		currentOperator = new ProjectOperator(unifiedKeyValues);

-		currentOperator.getInputs().add(parentOperator);

-		parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        // insert a projection operator; this is a *must* because, in hive,

+        // a reduce sink sometimes also does the projection operator's task

+        currentOperator = new ProjectOperator(unifiedKeyValues);

+        currentOperator.getInputs().add(parentOperator);

+        parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

 

-		reduceSinkToKeyVariables.put(operator, keyVariables);

-		List<String> fieldNames = new ArrayList<String>();

-		List<TypeInfo> types = new ArrayList<TypeInfo>();

-		for (LogicalVariable var : unifiedKeyValues) {

-			fieldNames.add(var.toString());

-			types.add(t.getType(var));

-		}

-		reduceSinkToFieldNames.put(operator, fieldNames);

-		reduceSinkToTypes.put(operator, types);

-		t.rewriteOperatorOutputSchema(variables, operator);

+        reduceSinkToKeyVariables.put(operator, keyVariables);

+        List<String> fieldNames = new ArrayList<String>();

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        for (LogicalVariable var : unifiedKeyValues) {

+            fieldNames.add(var.toString());

+            types.add(t.getType(var));

+        }

+        reduceSinkToFieldNames.put(operator, fieldNames);

+        reduceSinkToTypes.put(operator, types);

+        t.rewriteOperatorOutputSchema(variables, operator);

 

-		latestAlgebricksOperator = currentOperator;

-		latestReduceSink = operator;

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        latestAlgebricksOperator = currentOperator;

+        latestReduceSink = operator;

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

-	/**

-	 * partial rewrite a join operator

-	 * 

-	 * @param operator

-	 * @param t

-	 */

-	private void translateJoinOperatorPreprocess(Operator operator, Translator t) {

-		JoinDesc desc = (JoinDesc) operator.getConf();

-		ReduceSinkDesc reduceSinkDesc = (ReduceSinkDesc) latestReduceSink

-				.getConf();

-		int tag = reduceSinkDesc.getTag();

+    /**

+     * partially rewrite a join operator

+     * 

+     * @param operator

+     * @param t

+     */

+    private void translateJoinOperatorPreprocess(Operator operator, Translator t) {

+        JoinDesc desc = (JoinDesc) operator.getConf();

+        ReduceSinkDesc reduceSinkDesc = (ReduceSinkDesc) latestReduceSink.getConf();

+        int tag = reduceSinkDesc.getTag();

 

-		Map<Byte, List<ExprNodeDesc>> exprMap = desc.getExprs();

-		List<ExprNodeDesc> exprs = exprMap.get(Byte.valueOf((byte) tag));

+        Map<Byte, List<ExprNodeDesc>> exprMap = desc.getExprs();

+        List<ExprNodeDesc> exprs = exprMap.get(Byte.valueOf((byte) tag));

 

-		for (ExprNodeDesc expr : exprs)

-			t.rewriteExpression(expr);

+        for (ExprNodeDesc expr : exprs)

+            t.rewriteExpression(expr);

 

-		List<Operator> parents = operatorToHiveParents.get(operator);

-		if (parents == null) {

-			parents = new ArrayList<Operator>();

-			operatorToHiveParents.put(operator, parents);

-		}

-		parents.add(latestReduceSink);

+        List<Operator> parents = operatorToHiveParents.get(operator);

+        if (parents == null) {

+            parents = new ArrayList<Operator>();

+            operatorToHiveParents.put(operator, parents);

+        }

+        parents.add(latestReduceSink);

 

-		List<ILogicalOperator> asterixParents = operatorToAsterixParents

-				.get(operator);

-		if (asterixParents == null) {

-			asterixParents = new ArrayList<ILogicalOperator>();

-			operatorToAsterixParents.put(operator, asterixParents);

-		}

-		asterixParents.add(latestAlgebricksOperator);

-	}

+        List<ILogicalOperator> asterixParents = operatorToAsterixParents.get(operator);

+        if (asterixParents == null) {

+            asterixParents = new ArrayList<ILogicalOperator>();

+            operatorToAsterixParents.put(operator, asterixParents);

+        }

+        asterixParents.add(latestAlgebricksOperator);

+    }

 

-	// generate a join tree from a list of exchange/reducesink operator

-	// both exchanges and reduce sinks have the same order

-	private ILogicalOperator generateJoinTree(List<JoinCondDesc> conds,

-			List<ILogicalOperator> exchanges, List<Operator> reduceSinks,

-			int offset, Translator t) {

-		// get a list of reduce sink descs (input descs)

-		int inputSize = reduceSinks.size() - offset;

+    // generate a join tree from a list of exchange/reduce sink operators;

+    // the exchanges and reduce sinks are in the same order

+    private ILogicalOperator generateJoinTree(List<JoinCondDesc> conds, List<ILogicalOperator> exchanges,

+            List<Operator> reduceSinks, int offset, Translator t) {

+        // get a list of reduce sink descs (input descs)

+        int inputSize = reduceSinks.size() - offset;

 

-		if (inputSize == 2) {

-			ILogicalOperator currentRoot;

+        if (inputSize == 2) {

+            ILogicalOperator currentRoot;

 

-			List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

-			for (int i = reduceSinks.size() - 1; i >= offset; i--)

-				reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i)

-						.getConf());

+            List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+            for (int i = reduceSinks.size() - 1; i >= offset; i--)

+                reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i).getConf());

 

-			// get the object inspector for the join

-			List<String> fieldNames = new ArrayList<String>();

-			List<TypeInfo> types = new ArrayList<TypeInfo>();

-			for (int i = reduceSinks.size() - 1; i >= offset; i--) {

-				fieldNames

-						.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

-				types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

-			}

+            // get the object inspector for the join

+            List<String> fieldNames = new ArrayList<String>();

+            List<TypeInfo> types = new ArrayList<TypeInfo>();

+            for (int i = reduceSinks.size() - 1; i >= offset; i--) {

+                fieldNames.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+                types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+            }

 

-			// get number of equality conjunctions in the final join condition

-			int size = reduceSinkDescs.get(0).getKeyCols().size();

+            // get number of equality conjunctions in the final join condition

+            int size = reduceSinkDescs.get(0).getKeyCols().size();

 

-			// make up the join conditon expression

-			List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

-			for (int i = 0; i < size; i++) {

-				// create a join key pair

-				List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

-				for (ReduceSinkDesc sink : reduceSinkDescs) {

-					keyPair.add(sink.getKeyCols().get(i));

-				}

-				// create a hive equal condition

-				ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

-						TypeInfoFactory.booleanTypeInfo,

-						new GenericUDFOPEqual(), keyPair);

-				// add the equal condition to the conjunction list

-				joinConditionChildren.add(equality);

-			}

-			// get final conjunction expression

-			ExprNodeDesc conjunct = null;

+            // make up the join condition expression

+            List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+            for (int i = 0; i < size; i++) {

+                // create a join key pair

+                List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+                for (ReduceSinkDesc sink : reduceSinkDescs) {

+                    keyPair.add(sink.getKeyCols().get(i));

+                }

+                // create a hive equal condition

+                ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                        new GenericUDFOPEqual(), keyPair);

+                // add the equal condition to the conjunction list

+                joinConditionChildren.add(equality);

+            }

+            // get final conjunction expression

+            ExprNodeDesc conjunct = null;

 

-			if (joinConditionChildren.size() > 1)

-				conjunct = new ExprNodeGenericFuncDesc(

-						TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

-						joinConditionChildren);

-			else if (joinConditionChildren.size() == 1)

-				conjunct = joinConditionChildren.get(0);

-			else {

-				// there is no join equality condition, equal-join

-				conjunct = new ExprNodeConstantDesc(

-						TypeInfoFactory.booleanTypeInfo, new Boolean(true));

-			}

-			// get an ILogicalExpression from hive's expression

-			Mutable<ILogicalExpression> expression = t

-					.translateScalarFucntion(conjunct);

+            if (joinConditionChildren.size() > 1)

+                conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                        joinConditionChildren);

+            else if (joinConditionChildren.size() == 1)

+                conjunct = joinConditionChildren.get(0);

+            else {

+                // there is no join equality condition, equal-join

+                conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+            }

+            // get an ILogicalExpression from hive's expression

+            Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

 

-			Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

-					exchanges.get(exchanges.size() - 1));

-			Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

-					exchanges.get(exchanges.size() - 2));

-			// get the join operator

-			if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

-				currentRoot = new LeftOuterJoinOperator(expression);

-				Mutable<ILogicalOperator> temp = leftBranch;

-				leftBranch = rightBranch;

-				rightBranch = temp;

-			} else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

-				currentRoot = new LeftOuterJoinOperator(expression);

-			} else

-				currentRoot = new InnerJoinOperator(expression);

+            Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

+                    exchanges.get(exchanges.size() - 1));

+            Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

+                    exchanges.get(exchanges.size() - 2));

+            // get the join operator

+            if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+                Mutable<ILogicalOperator> temp = leftBranch;

+                leftBranch = rightBranch;

+                rightBranch = temp;

+            } else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+            } else

+                currentRoot = new InnerJoinOperator(expression);

 

-			currentRoot.getInputs().add(leftBranch);

-			currentRoot.getInputs().add(rightBranch);

+            currentRoot.getInputs().add(leftBranch);

+            currentRoot.getInputs().add(rightBranch);

 

-			// rewriteOperatorOutputSchema(variables, operator);

-			return currentRoot;

-		} else {

-			// get the child join operator and insert and one-to-one exchange

-			ILogicalOperator joinSrcOne = generateJoinTree(conds, exchanges,

-					reduceSinks, offset + 1, t);

-			// joinSrcOne.addInput(childJoin);

+            // rewriteOperatorOutputSchema(variables, operator);

+            return currentRoot;

+        } else {

+            // get the child join operator and insert a one-to-one exchange

+            ILogicalOperator joinSrcOne = generateJoinTree(conds, exchanges, reduceSinks, offset + 1, t);

+            // joinSrcOne.addInput(childJoin);

 

-			ILogicalOperator currentRoot;

+            ILogicalOperator currentRoot;

 

-			List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

-			for (int i = offset; i < offset + 2; i++)

-				reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i)

-						.getConf());

+            List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+            for (int i = offset; i < offset + 2; i++)

+                reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i).getConf());

 

-			// get the object inspector for the join

-			List<String> fieldNames = new ArrayList<String>();

-			List<TypeInfo> types = new ArrayList<TypeInfo>();

-			for (int i = offset; i < reduceSinks.size(); i++) {

-				fieldNames

-						.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

-				types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

-			}

+            // get the object inspector for the join

+            List<String> fieldNames = new ArrayList<String>();

+            List<TypeInfo> types = new ArrayList<TypeInfo>();

+            for (int i = offset; i < reduceSinks.size(); i++) {

+                fieldNames.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+                types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+            }

 

-			// get number of equality conjunctions in the final join condition

-			int size = reduceSinkDescs.get(0).getKeyCols().size();

+            // get number of equality conjunctions in the final join condition

+            int size = reduceSinkDescs.get(0).getKeyCols().size();

 

-			// make up the join condition expression

-			List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

-			for (int i = 0; i < size; i++) {

-				// create a join key pair

-				List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

-				for (ReduceSinkDesc sink : reduceSinkDescs) {

-					keyPair.add(sink.getKeyCols().get(i));

-				}

-				// create a hive equal condition

-				ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

-						TypeInfoFactory.booleanTypeInfo,

-						new GenericUDFOPEqual(), keyPair);

-				// add the equal condition to the conjunction list

-				joinConditionChildren.add(equality);

-			}

-			// get final conjunction expression

-			ExprNodeDesc conjunct = null;

+            // make up the join condition expression

+            List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+            for (int i = 0; i < size; i++) {

+                // create a join key pair

+                List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+                for (ReduceSinkDesc sink : reduceSinkDescs) {

+                    keyPair.add(sink.getKeyCols().get(i));

+                }

+                // create a hive equal condition

+                ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                        new GenericUDFOPEqual(), keyPair);

+                // add the equal condition to the conjunction list

+                joinConditionChildren.add(equality);

+            }

+            // get final conjunction expression

+            ExprNodeDesc conjunct = null;

 

-			if (joinConditionChildren.size() > 1)

-				conjunct = new ExprNodeGenericFuncDesc(

-						TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

-						joinConditionChildren);

-			else if (joinConditionChildren.size() == 1)

-				conjunct = joinConditionChildren.get(0);

-			else {

-				// there is no join equality condition, full outer join

-				conjunct = new ExprNodeConstantDesc(

-						TypeInfoFactory.booleanTypeInfo, new Boolean(true));

-			}

-			// get an ILogicalExpression from hive's expression

-			Mutable<ILogicalExpression> expression = t

-					.translateScalarFucntion(conjunct);

+            if (joinConditionChildren.size() > 1)

+                conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                        joinConditionChildren);

+            else if (joinConditionChildren.size() == 1)

+                conjunct = joinConditionChildren.get(0);

+            else {

+                // there is no join equality condition; use a constant true predicate

+                conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+            }

+            // get an ILogicalExpression from hive's expression

+            Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

 

-			Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

-					joinSrcOne);

-			Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

-					exchanges.get(offset));

+            Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(joinSrcOne);

+            Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(exchanges.get(offset));

 

-			// get the join operator

-			if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

-				currentRoot = new LeftOuterJoinOperator(expression);

-				Mutable<ILogicalOperator> temp = leftBranch;

-				leftBranch = rightBranch;

-				rightBranch = temp;

-			} else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

-				currentRoot = new LeftOuterJoinOperator(expression);

-			} else

-				currentRoot = new InnerJoinOperator(expression);

+            // get the join operator

+            if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+                Mutable<ILogicalOperator> temp = leftBranch;

+                leftBranch = rightBranch;

+                rightBranch = temp;

+            } else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+            } else

+                currentRoot = new InnerJoinOperator(expression);

 

-			// set the inputs from Algebricks join operator

-			// add the current table

-			currentRoot.getInputs().add(leftBranch);

-			currentRoot.getInputs().add(rightBranch);

+            // set the inputs of the Algebricks join operator

+            // add the current table

+            currentRoot.getInputs().add(leftBranch);

+            currentRoot.getInputs().add(rightBranch);

 

-			return currentRoot;

-		}

-	}

+            return currentRoot;

+        }

+    }

 }
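
Both branches of generateJoinTree assemble the join predicate the same way: one equality per key pair, AND-ed together, with a constant-true fallback when there are no keys. A hedged standalone sketch of that construction, reusing the Hive expression classes the patch itself uses (the helper class and method names are illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public final class JoinPredicateSketch {

    // leftKeys.get(i) is compared with rightKeys.get(i); the equalities
    // are conjoined, exactly as in the visitor above
    public static ExprNodeDesc equiJoinPredicate(List<ExprNodeDesc> leftKeys, List<ExprNodeDesc> rightKeys) {
        List<ExprNodeDesc> equalities = new ArrayList<ExprNodeDesc>();
        for (int i = 0; i < leftKeys.size(); i++) {
            // one equality per join key pair
            List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();
            keyPair.add(leftKeys.get(i));
            keyPair.add(rightKeys.get(i));
            equalities.add(new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
                    keyPair));
        }
        if (equalities.isEmpty()) {
            // no equality condition: degenerate to a constant true predicate
            return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);
        }
        if (equalities.size() == 1) {
            return equalities.get(0);
        }
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(), equalities);
    }
}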

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
index 004a8c2..5b6fde0 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
@@ -25,30 +25,23 @@
 /**

  * The lateral view join operator is used for FROM src LATERAL VIEW udtf()...

  * This operator was implemented with the following operator DAG in mind.

- * 

  * For a query such as

- * 

  * SELECT pageid, adid.* FROM example_table LATERAL VIEW explode(adid_list) AS

  * adid

- * 

  * The top of the operator DAG will look similar to

- * 

  *      [Table Scan]
  *           |
  *  [Lateral View Forward]
  *        /      \
  *   [Select](*)    [Select](adid_list)
  *        |              |
  *        |          [UDTF] (explode)
  *         \            /
  *      [Lateral View Join]
  *               |
  *               |
  *      [Select] (pageid, adid.*)
  *               |
  *              ....

- * 

  * Rows from the table scan operator are first sent to a lateral view forward

  * operator that just forwards the row and marks the start of a LV. The select

  * operator on the left picks all the columns while the select operator on the

  * right picks only the columns needed by the UDTF.

- * 

  * The output of select in the left branch and output of the UDTF in the right

  * branch are then sent to the lateral view join (LVJ). In most cases, the UDTF

  * will generate > 1 row for every row received from the TS, while the left

  * select operator will generate only one. For each row output from the TS, the

  * LVJ outputs all possible rows that can be created by joining the row from the

  * left select and one of the rows output from the UDTF.

- * 

  * Additional lateral views can be supported by adding a similar DAG after the

  * previous LVJ operator.

  */

@@ -56,69 +49,62 @@
 @SuppressWarnings("rawtypes")

 public class LateralViewJoinVisitor extends DefaultVisitor {

 

-	private UDTFDesc udtf;

+    private UDTFDesc udtf;

 

-	private List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+    private List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

 

-	@Override

-	public Mutable<ILogicalOperator> visit(LateralViewJoinOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException {

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

 

-		parents.add(AlgebricksParentOperatorRef);

-		if (operator.getParentOperators().size() > parents.size()) {

-			return null;

-		}

+        parents.add(AlgebricksParentOperatorRef);

+        if (operator.getParentOperators().size() > parents.size()) {

+            return null;

+        }

 

-		Operator parent0 = operator.getParentOperators().get(0);

-		ILogicalOperator parentOperator;

-		ILogicalExpression unnestArg;

-		if (parent0 instanceof UDTFOperator) {

-			List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

-			VariableUtilities.getLiveVariables(parents.get(1).getValue(),

-					unnestVars);

-			unnestArg = new VariableReferenceExpression(unnestVars.get(0));

-			parentOperator = parents.get(1).getValue();

-		} else {

-			List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

-			VariableUtilities.getLiveVariables(parents.get(0).getValue(),

-					unnestVars);

-			unnestArg = new VariableReferenceExpression(unnestVars.get(0));

-			parentOperator = parents.get(0).getValue();

-		}

+        Operator parent0 = operator.getParentOperators().get(0);

+        ILogicalOperator parentOperator;

+        ILogicalExpression unnestArg;

+        if (parent0 instanceof UDTFOperator) {

+            List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(parents.get(1).getValue(), unnestVars);

+            unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+            parentOperator = parents.get(1).getValue();

+        } else {

+            List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(parents.get(0).getValue(), unnestVars);

+            unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+            parentOperator = parents.get(0).getValue();

+        }

 

-		LogicalVariable var = t.getVariable(udtf.toString(),

-				TypeInfoFactory.unknownTypeInfo);

+        LogicalVariable var = t.getVariable(udtf.toString(), TypeInfoFactory.unknownTypeInfo);

 

-		Mutable<ILogicalExpression> unnestExpr = t.translateUnnestFunction(

-				udtf, new MutableObject<ILogicalExpression>(unnestArg));

-		ILogicalOperator currentOperator = new UnnestOperator(var, unnestExpr);

+        Mutable<ILogicalExpression> unnestExpr = t.translateUnnestFunction(udtf, new MutableObject<ILogicalExpression>(

+                unnestArg));

+        ILogicalOperator currentOperator = new UnnestOperator(var, unnestExpr);

 

-		List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();

-		VariableUtilities.getLiveVariables(parentOperator, outputVars);

-		outputVars.add(var);

-		currentOperator.getInputs().add(

-				new MutableObject<ILogicalOperator>(parentOperator));

+        List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();

+        VariableUtilities.getLiveVariables(parentOperator, outputVars);

+        outputVars.add(var);

+        currentOperator.getInputs().add(new MutableObject<ILogicalOperator>(parentOperator));

 

-		parents.clear();

-		udtf = null;

-		t.rewriteOperatorOutputSchema(outputVars, operator);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        parents.clear();

+        udtf = null;

+        t.rewriteOperatorOutputSchema(outputVars, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		Schema currentSchema = t.generateInputSchema(operator

-				.getParentOperators().get(0));

-		udtf = (UDTFDesc) operator.getConf();

+    @Override

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+        udtf = (UDTFDesc) operator.getConf();

 

-		// populate the schema from upstream operator

-		operator.setSchema(operator.getParentOperators().get(0).getSchema());

-		List<LogicalVariable> latestOutputSchema = t

-				.getVariablesFromSchema(currentSchema);

-		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

-		return null;

-	}

+        // populate the schema from upstream operator

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return null;

+    }

 

 }
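
The visitor above collapses Hive's lateral view DAG into a single Algebricks UnnestOperator. The row-level semantics it must preserve: each input row is paired with every element that explode() yields from its list column, so n input rows can produce more than n output rows. A self-contained plain-Java illustration using the javadoc's pageid/adid_list example (the data values are made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public final class LateralViewDemo {
    public static void main(String[] args) {
        // (pageid, adid_list) input rows
        Object[][] rows = { { "front_page", Arrays.asList(1, 2) }, { "contact", Arrays.asList(3) } };

        List<String> output = new ArrayList<String>();
        for (Object[] row : rows) {
            String pageid = (String) row[0];
            List<?> adids = (List<?>) row[1];
            // explode(adid_list): one output row per list element
            for (Object adid : adids) {
                output.add(pageid + ", " + adid);
            }
        }
        // 2 input rows -> 3 output rows
        for (String line : output) {
            System.out.println(line);
        }
    }
}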

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
index 84cdf00..4ca3ddc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
@@ -18,27 +18,24 @@
 

 public class LimitVisitor extends DefaultVisitor {

 

-	@Override

-	public Mutable<ILogicalOperator> visit(LimitOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		Schema currentSchema = t.generateInputSchema(operator

-				.getParentOperators().get(0));

+    @Override

+    public Mutable<ILogicalOperator> visit(LimitOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

 

-		LimitDesc desc = (LimitDesc) operator.getConf();

-		int limit = desc.getLimit();

-		Integer limitValue = new Integer(limit);

+        LimitDesc desc = (LimitDesc) operator.getConf();

+        int limit = desc.getLimit();

+        Integer limitValue = new Integer(limit);

 

-		ILogicalExpression expr = new ConstantExpression(

-				new HivesterixConstantValue(limitValue));

-		ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator(

-				expr, true);

-		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        ILogicalExpression expr = new ConstantExpression(new HivesterixConstantValue(limitValue));

+        ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator(

+                expr, true);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

 

-		operator.setSchema(operator.getParentOperators().get(0).getSchema());

-		List<LogicalVariable> latestOutputSchema = t

-				.getVariablesFromSchema(currentSchema);

-		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
index fa5d014..4aba6a4 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
@@ -34,150 +34,138 @@
 @SuppressWarnings("rawtypes")

 public class MapJoinVisitor extends DefaultVisitor {

 

-	/**

-	 * map a join operator (in hive) to its parent operators (in asterix)

-	 */

-	private HashMap<Operator, List<Mutable<ILogicalOperator>>> opMap = new HashMap<Operator, List<Mutable<ILogicalOperator>>>();

+    /**

+     * map a join operator (in hive) to its parent operators (in asterix)

+     */

+    private HashMap<Operator, List<Mutable<ILogicalOperator>>> opMap = new HashMap<Operator, List<Mutable<ILogicalOperator>>>();

 

-	@Override

-	public Mutable<ILogicalOperator> visit(MapJoinOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

-		List<Operator<? extends Serializable>> joinSrc = operator

-				.getParentOperators();

-		List<Mutable<ILogicalOperator>> parents = opMap.get(operator);

-		if (parents == null) {

-			parents = new ArrayList<Mutable<ILogicalOperator>>();

-			opMap.put(operator, parents);

-		}

-		parents.add(AlgebricksParentOperatorRef);

-		if (joinSrc.size() != parents.size())

-			return null;

+    @Override

+    public Mutable<ILogicalOperator> visit(MapJoinOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        List<Operator<? extends Serializable>> joinSrc = operator.getParentOperators();

+        List<Mutable<ILogicalOperator>> parents = opMap.get(operator);

+        if (parents == null) {

+            parents = new ArrayList<Mutable<ILogicalOperator>>();

+            opMap.put(operator, parents);

+        }

+        parents.add(AlgebricksParentOperatorRef);

+        if (joinSrc.size() != parents.size())

+            return null;

 

-		ILogicalOperator currentOperator;

-		// make an map join operator

-		// TODO: will have trouble for n-way joins

-		MapJoinDesc joinDesc = (MapJoinDesc) operator.getConf();

+        ILogicalOperator currentOperator;

+        // make a map join operator

+        // TODO: will have trouble for n-way joins

+        MapJoinDesc joinDesc = (MapJoinDesc) operator.getConf();

 

-		Map<Byte, List<ExprNodeDesc>> keyMap = joinDesc.getKeys();

-		// get the projection expression (already re-written) from each source

-		// table

-		Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+        Map<Byte, List<ExprNodeDesc>> keyMap = joinDesc.getKeys();

+        // get the projection expression (already re-written) from each source

+        // table

+        Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

 

-		int inputSize = operator.getParentOperators().size();

-		// get a list of reduce sink descs (input descs)

+        int inputSize = operator.getParentOperators().size();

+        // get a list of reduce sink descs (input descs)

 

-		// get the parent operator

-		List<Mutable<ILogicalOperator>> parentOps = parents;

+        // get the parent operator

+        List<Mutable<ILogicalOperator>> parentOps = parents;

 

-		List<String> fieldNames = new ArrayList<String>();

-		List<TypeInfo> types = new ArrayList<TypeInfo>();

-		for (Operator ts : joinSrc) {

-			List<ColumnInfo> columns = ts.getSchema().getSignature();

-			for (ColumnInfo col : columns) {

-				fieldNames.add(col.getInternalName());

-				types.add(col.getType());

-			}

-		}

+        List<String> fieldNames = new ArrayList<String>();

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        for (Operator ts : joinSrc) {

+            List<ColumnInfo> columns = ts.getSchema().getSignature();

+            for (ColumnInfo col : columns) {

+                fieldNames.add(col.getInternalName());

+                types.add(col.getType());

+            }

+        }

 

-		// get number of equality conjunctions in the final join condition

-		Set<Entry<Byte, List<ExprNodeDesc>>> keyEntries = keyMap.entrySet();

-		Iterator<Entry<Byte, List<ExprNodeDesc>>> entry = keyEntries.iterator();

+        // get number of equality conjunctions in the final join condition

+        Set<Entry<Byte, List<ExprNodeDesc>>> keyEntries = keyMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> entry = keyEntries.iterator();

 

-		int size = 0;

-		if (entry.hasNext())

-			size = entry.next().getValue().size();

+        int size = 0;

+        if (entry.hasNext())

+            size = entry.next().getValue().size();

 

-		// make up the join conditon expression

-		List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

-		for (int i = 0; i < size; i++) {

-			// create a join key pair

-			List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

-			for (int j = 0; j < inputSize; j++) {

-				keyPair.add(keyMap.get(Byte.valueOf((byte) j)).get(i));

-			}

-			// create a hive equal condition

-			ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

-					TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),

-					keyPair);

-			// add the equal condition to the conjunction list

-			joinConditionChildren.add(equality);

-		}

-		// get final conjunction expression

-		ExprNodeDesc conjunct = null;

+        // make up the join condition expression

+        List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+        for (int i = 0; i < size; i++) {

+            // create a join key pair

+            List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+            for (int j = 0; j < inputSize; j++) {

+                keyPair.add(keyMap.get(Byte.valueOf((byte) j)).get(i));

+            }

+            // create a hive equal condition

+            ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                    new GenericUDFOPEqual(), keyPair);

+            // add the equal condition to the conjunction list

+            joinConditionChildren.add(equality);

+        }

+        // get final conjunction expression

+        ExprNodeDesc conjunct = null;

 

-		if (joinConditionChildren.size() > 1)

-			conjunct = new ExprNodeGenericFuncDesc(

-					TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

-					joinConditionChildren);

-		else if (joinConditionChildren.size() == 1)

-			conjunct = joinConditionChildren.get(0);

-		else {

-			// there is no join equality condition, full outer join

-			conjunct = new ExprNodeConstantDesc(

-					TypeInfoFactory.booleanTypeInfo, new Boolean(true));

-		}

-		// get an ILogicalExpression from hive's expression

-		Mutable<ILogicalExpression> expression = t

-				.translateScalarFucntion(conjunct);

+        if (joinConditionChildren.size() > 1)

+            conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                    joinConditionChildren);

+        else if (joinConditionChildren.size() == 1)

+            conjunct = joinConditionChildren.get(0);

+        else {

+            // no join equality condition: fall back to a constant TRUE condition (cross product)

+            conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+        }

+        // get an ILogicalExpression from hive's expression

+        Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

 

-		ArrayList<LogicalVariable> left = new ArrayList<LogicalVariable>();

-		ArrayList<LogicalVariable> right = new ArrayList<LogicalVariable>();

+        ArrayList<LogicalVariable> left = new ArrayList<LogicalVariable>();

+        ArrayList<LogicalVariable> right = new ArrayList<LogicalVariable>();

 

-		Set<Entry<Byte, List<ExprNodeDesc>>> kentries = keyMap.entrySet();

-		Iterator<Entry<Byte, List<ExprNodeDesc>>> kiterator = kentries

-				.iterator();

-		int iteration = 0;

-		ILogicalOperator assignOperator = null;

-		while (kiterator.hasNext()) {

-			List<ExprNodeDesc> outputExprs = kiterator.next().getValue();

+        Set<Entry<Byte, List<ExprNodeDesc>>> kentries = keyMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> kiterator = kentries.iterator();

+        int iteration = 0;

+        ILogicalOperator assignOperator = null;

+        while (kiterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = kiterator.next().getValue();

 

-			if (iteration == 0)

-				assignOperator = t.getAssignOperator(

-						AlgebricksParentOperatorRef, outputExprs, left);

-			else

-				assignOperator = t.getAssignOperator(

-						AlgebricksParentOperatorRef, outputExprs, right);

+            if (iteration == 0)

+                assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, left);

+            else

+                assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, right);

 

-			if (assignOperator != null) {

-				currentOperator = assignOperator;

-				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-						currentOperator);

-			}

-			iteration++;

-		}

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+            iteration++;

+        }

 

-		List<Mutable<ILogicalOperator>> inputs = parentOps;

+        List<Mutable<ILogicalOperator>> inputs = parentOps;

 

-		// get the join operator

-		currentOperator = new InnerJoinOperator(expression);

+        // get the join operator

+        currentOperator = new InnerJoinOperator(expression);

 

-		// set the inputs from asterix join operator

-		for (Mutable<ILogicalOperator> input : inputs)

-			currentOperator.getInputs().add(input);

-		AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-				currentOperator);

+        // set the inputs from asterix join operator

+        for (Mutable<ILogicalOperator> input : inputs)

+            currentOperator.getInputs().add(input);

+        AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

 

-		// add assign and project operator

-		// output variables

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

-		Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

-		Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

-		while (iterator.hasNext()) {

-			List<ExprNodeDesc> outputExprs = iterator.next().getValue();

-			assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef,

-					outputExprs, variables);

+        // add assign and project operator

+        // output variables

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+        while (iterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+            assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, variables);

 

-			if (assignOperator != null) {

-				currentOperator = assignOperator;

-				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-						currentOperator);

-			}

-		}

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+        }

 

-		currentOperator = new ProjectOperator(variables);

-		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

-		t.rewriteOperatorOutputSchema(variables, operator);

-		// opMap.clear();

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        currentOperator = new ProjectOperator(variables);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        // opMap.clear();

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 }
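
Note on the map-join hunk above: for each of the size key positions, the visitor gathers the matching key expression from every input, wraps the group in an equality, and folds the equalities into a single conjunction, falling back to a constant TRUE when there are no equality keys. A minimal, JDK-only sketch of that folding follows; Eq, And, and Const are hypothetical stand-ins for Hive's GenericUDFOPEqual, GenericUDFOPAnd, and ExprNodeConstantDesc, and the sketch is illustrative, not part of the patch.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    class JoinConditionSketch {
        interface Expr {}
        record Eq(List<Expr> operands) implements Expr {}    // stand-in for GenericUDFOPEqual
        record And(List<Expr> conjuncts) implements Expr {}  // stand-in for GenericUDFOPAnd
        record Const(boolean value) implements Expr {}       // stand-in for ExprNodeConstantDesc

        static Expr buildCondition(Map<Byte, List<Expr>> keyMap, int inputSize) {
            int size = keyMap.isEmpty() ? 0 : keyMap.values().iterator().next().size();
            List<Expr> conjuncts = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                List<Expr> keyPair = new ArrayList<>();
                for (int j = 0; j < inputSize; j++)
                    keyPair.add(keyMap.get((byte) j).get(i)); // i-th key of input j
                conjuncts.add(new Eq(keyPair));
            }
            if (conjuncts.size() > 1)
                return new And(conjuncts);
            if (conjuncts.size() == 1)
                return conjuncts.get(0);
            return new Const(true); // no equality keys: constant-TRUE condition
        }
    }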

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
index 0d2067c..eb0922f 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
@@ -17,42 +17,40 @@
 

 public class ProjectVisitor extends DefaultVisitor {

 

-	/**

-	 * translate project operator

-	 */

-	@Override

-	public Mutable<ILogicalOperator> visit(SelectOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+    /**

+     * translate project operator

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(SelectOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) {

 

-		SelectDesc desc = (SelectDesc) operator.getConf();

+        SelectDesc desc = (SelectDesc) operator.getConf();

 

-		if (desc == null)

-			return null;

+        if (desc == null)

+            return null;

 

-		List<ExprNodeDesc> cols = desc.getColList();

+        List<ExprNodeDesc> cols = desc.getColList();

 

-		if (cols == null)

-			return null;

+        if (cols == null)

+            return null;

 

-		// insert assign operator if necessary

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        // insert assign operator if necessary

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

 

-		for (ExprNodeDesc expr : cols)

-			t.rewriteExpression(expr);

+        for (ExprNodeDesc expr : cols)

+            t.rewriteExpression(expr);

 

-		ILogicalOperator assignOp = t.getAssignOperator(

-				AlgebricksParentOperator, cols, variables);

-		ILogicalOperator currentOperator = null;

-		if (assignOp != null) {

-			currentOperator = assignOp;

-			AlgebricksParentOperator = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

+        ILogicalOperator assignOp = t.getAssignOperator(AlgebricksParentOperator, cols, variables);

+        ILogicalOperator currentOperator = null;

+        if (assignOp != null) {

+            currentOperator = assignOp;

+            AlgebricksParentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

 

-		currentOperator = new ProjectOperator(variables);

-		currentOperator.getInputs().add(AlgebricksParentOperator);

-		t.rewriteOperatorOutputSchema(variables, operator);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        currentOperator = new ProjectOperator(variables);

+        currentOperator.getInputs().add(AlgebricksParentOperator);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

 }
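
ProjectVisitor shows the assign-then-project pattern this patch reformats throughout: if the translator yields an assign operator for the column expressions, it is spliced in as the new parent before the final project. A simplified sketch under hypothetical stand-in types (Op is invented for illustration):

    import java.util.ArrayList;
    import java.util.List;

    class AssignProjectSketch {
        static class Op {
            final String kind;
            final List<Op> inputs = new ArrayList<>();
            Op(String kind) { this.kind = kind; }
        }

        static Op translateSelect(Op parent, List<String> cols, List<String> outVars) {
            Op assign = cols.isEmpty() ? null : new Op("assign"); // getAssignOperator may return null
            if (assign != null) {
                assign.inputs.add(parent);
                parent = assign;      // the assign becomes the new parent
                outVars.addAll(cols); // variables produced by the assign
            }
            Op project = new Op("project");
            project.inputs.add(parent); // project keeps only the produced variables
            return project;
        }
    }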

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
index a2c0d03..325b632 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
@@ -26,100 +26,88 @@
 

 public class SortVisitor extends DefaultVisitor {

 

-	@SuppressWarnings("rawtypes")

-	@Override

-	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException {

-		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

-		Operator downStream = (Operator) operator.getChildOperators().get(0);

-		List<ExprNodeDesc> keys = desc.getKeyCols();

-		if (!(downStream instanceof ExtractOperator

-				&& desc.getNumReducers() == 1 && keys.size() > 0)) {

-			return null;

-		}

+    @SuppressWarnings("rawtypes")

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        Operator downStream = (Operator) operator.getChildOperators().get(0);

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        if (!(downStream instanceof ExtractOperator && desc.getNumReducers() == 1 && keys.size() > 0)) {

+            return null;

+        }

 

-		List<ExprNodeDesc> schema = new ArrayList<ExprNodeDesc>();

-		List<ExprNodeDesc> values = desc.getValueCols();

-		List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

-		for (ExprNodeDesc key : keys) {

-			t.rewriteExpression(key);

-		}

-		for (ExprNodeDesc value : values) {

-			t.rewriteExpression(value);

-		}

-		for (ExprNodeDesc col : partitionCols) {

-			t.rewriteExpression(col);

-		}

+        List<ExprNodeDesc> schema = new ArrayList<ExprNodeDesc>();

+        List<ExprNodeDesc> values = desc.getValueCols();

+        List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+        for (ExprNodeDesc key : keys) {

+            t.rewriteExpression(key);

+        }

+        for (ExprNodeDesc value : values) {

+            t.rewriteExpression(value);

+        }

+        for (ExprNodeDesc col : partitionCols) {

+            t.rewriteExpression(col);

+        }

 

-		// add a order-by operator and limit if any

-		List<Pair<IOrder, Mutable<ILogicalExpression>>> pairs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();

-		char[] orders = desc.getOrder().toCharArray();

-		int i = 0;

-		for (ExprNodeDesc key : keys) {

-			Mutable<ILogicalExpression> expr = t.translateScalarFucntion(key);

-			IOrder order = orders[i] == '+' ? OrderOperator.ASC_ORDER

-					: OrderOperator.DESC_ORDER;

+        // add an order-by operator and limit if any

+        List<Pair<IOrder, Mutable<ILogicalExpression>>> pairs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();

+        char[] orders = desc.getOrder().toCharArray();

+        int i = 0;

+        for (ExprNodeDesc key : keys) {

+            Mutable<ILogicalExpression> expr = t.translateScalarFucntion(key);

+            IOrder order = orders[i] == '+' ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;

 

-			Pair<IOrder, Mutable<ILogicalExpression>> pair = new Pair<IOrder, Mutable<ILogicalExpression>>(

-					order, expr);

-			pairs.add(pair);

-			i++;

-		}

+            Pair<IOrder, Mutable<ILogicalExpression>> pair = new Pair<IOrder, Mutable<ILogicalExpression>>(order, expr);

+            pairs.add(pair);

+            i++;

+        }

 

-		// get input variables

-		ArrayList<LogicalVariable> inputVariables = new ArrayList<LogicalVariable>();

-		VariableUtilities.getProducedVariables(

-				AlgebricksParentOperatorRef.getValue(), inputVariables);

+        // get input variables

+        ArrayList<LogicalVariable> inputVariables = new ArrayList<LogicalVariable>();

+        VariableUtilities.getProducedVariables(AlgebricksParentOperatorRef.getValue(), inputVariables);

 

-		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

-		ILogicalOperator currentOperator;

-		ILogicalOperator assignOp = t.getAssignOperator(

-				AlgebricksParentOperatorRef, keys, keyVariables);

-		if (assignOp != null) {

-			currentOperator = assignOp;

-			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        ILogicalOperator currentOperator;

+        ILogicalOperator assignOp = t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+        if (assignOp != null) {

+            currentOperator = assignOp;

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

 

-		OrderColumn[] keyColumns = new OrderColumn[keyVariables.size()];

+        OrderColumn[] keyColumns = new OrderColumn[keyVariables.size()];

 

-		for (int j = 0; j < keyColumns.length; j++)

-			keyColumns[j] = new OrderColumn(keyVariables.get(j),

-					pairs.get(j).first.getKind());

+        for (int j = 0; j < keyColumns.length; j++)

+            keyColumns[j] = new OrderColumn(keyVariables.get(j), pairs.get(j).first.getKind());

 

-		// handle order operator

-		currentOperator = new OrderOperator(pairs);

-		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

-		AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-				currentOperator);

+        // handle order operator

+        currentOperator = new OrderOperator(pairs);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

 

-		// project back, remove generated sort-key columns if any

-		if (assignOp != null) {

-			currentOperator = new ProjectOperator(inputVariables);

-			currentOperator.getInputs().add(AlgebricksParentOperatorRef);

-			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

+        // project back, remove generated sort-key columns if any

+        if (assignOp != null) {

+            currentOperator = new ProjectOperator(inputVariables);

+            currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

 

-		/**

-		 * a special rule for hive's order by output schema of reduce sink

-		 * operator only contains the columns

-		 */

-		for (ExprNodeDesc value : values) {

-			schema.add(value);

-		}

+        /**

+         * a special rule for hive's order-by: the output schema of the

+         * reduce sink operator only contains the value columns

+         */

+        for (ExprNodeDesc value : values) {

+            schema.add(value);

+        }

 

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

-		ILogicalOperator assignOperator = t.getAssignOperator(

-				AlgebricksParentOperatorRef, schema, variables);

-		t.rewriteOperatorOutputSchema(variables, operator);

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        ILogicalOperator assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, schema, variables);

+        t.rewriteOperatorOutputSchema(variables, operator);

 

-		if (assignOperator != null) {

-			currentOperator = assignOperator;

-			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

-					currentOperator);

-		}

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 }
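
In SortVisitor the order string from ReduceSinkDesc.getOrder() lines up positionally with the key columns, '+' meaning ascending as the code above assumes. A minimal sketch of that pairing; Direction is a hypothetical stand-in for Algebricks' IOrder:

    import java.util.AbstractMap.SimpleEntry;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map.Entry;

    class OrderPairSketch {
        enum Direction { ASC, DESC }

        static List<Entry<Direction, String>> toOrderPairs(String order, List<String> keys) {
            char[] orders = order.toCharArray(); // one char per key, e.g. "+-"
            List<Entry<Direction, String>> pairs = new ArrayList<>();
            for (int i = 0; i < keys.size(); i++) {
                Direction d = orders[i] == '+' ? Direction.ASC : Direction.DESC;
                pairs.add(new SimpleEntry<>(d, keys.get(i)));
            }
            return pairs;
        }
    }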

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
index 3e12bb9..fe5eac2 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
@@ -34,115 +34,102 @@
 

 public class TableScanWriteVisitor extends DefaultVisitor {

 

-	/**

-	 * map from alias to partition desc

-	 */

-	private HashMap<String, PartitionDesc> aliasToPathMap;

+    /**

+     * map from alias to partition desc

+     */

+    private HashMap<String, PartitionDesc> aliasToPathMap;

 

-	/**

-	 * map from partition desc to data source

-	 */

-	private HashMap<PartitionDesc, IDataSource<PartitionDesc>> dataSourceMap = new HashMap<PartitionDesc, IDataSource<PartitionDesc>>();

+    /**

+     * map from partition desc to data source

+     */

+    private HashMap<PartitionDesc, IDataSource<PartitionDesc>> dataSourceMap = new HashMap<PartitionDesc, IDataSource<PartitionDesc>>();

 

-	/**

-	 * constructor

-	 * 

-	 * @param aliasToPathMap

-	 */

-	public TableScanWriteVisitor(HashMap<String, PartitionDesc> aliasToPathMap) {

-		this.aliasToPathMap = aliasToPathMap;

-	}

+    /**

+     * constructor

+     * 

+     * @param aliasToPathMap

+     */

+    public TableScanWriteVisitor(HashMap<String, PartitionDesc> aliasToPathMap) {

+        this.aliasToPathMap = aliasToPathMap;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(TableScanOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		TableScanDesc desc = (TableScanDesc) operator.getConf();

-		if (desc == null) {

-			List<LogicalVariable> schema = new ArrayList<LogicalVariable>();

-			VariableUtilities.getLiveVariables(

-					AlgebricksParentOperator.getValue(), schema);

-			t.rewriteOperatorOutputSchema(schema, operator);

-			return null;

-		}

+    @Override

+    public Mutable<ILogicalOperator> visit(TableScanOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        TableScanDesc desc = (TableScanDesc) operator.getConf();

+        if (desc == null) {

+            List<LogicalVariable> schema = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(AlgebricksParentOperator.getValue(), schema);

+            t.rewriteOperatorOutputSchema(schema, operator);

+            return null;

+        }

 

-		List<ColumnInfo> columns = operator.getSchema().getSignature();

-		for (int i = columns.size() - 1; i >= 0; i--)

-			if (columns.get(i).getIsVirtualCol() == true)

-				columns.remove(i);

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+        for (int i = columns.size() - 1; i >= 0; i--)

+            if (columns.get(i).getIsVirtualCol())

+                columns.remove(i);

 

-		// start with empty tuple operator

-		List<TypeInfo> types = new ArrayList<TypeInfo>();

-		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

-		List<String> names = new ArrayList<String>();

-		for (ColumnInfo column : columns) {

-			types.add(column.getType());

+        // start with empty tuple operator

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        List<String> names = new ArrayList<String>();

+        for (ColumnInfo column : columns) {

+            types.add(column.getType());

 

-			LogicalVariable var = t.getVariableFromFieldName(column

-					.getTabAlias() + "." + column.getInternalName());

-			LogicalVariable varNew;

+            LogicalVariable var = t.getVariableFromFieldName(column.getTabAlias() + "." + column.getInternalName());

+            LogicalVariable varNew;

 

-			if (var != null) {

-				varNew = t.getVariable(

-						column.getTabAlias() + "." + column.getInternalName()

-								+ operator.toString(), column.getType());

-				t.replaceVariable(var, varNew);

-				var = varNew;

-			} else

-				var = t.getNewVariable(

-						column.getTabAlias() + "." + column.getInternalName(),

-						column.getType());

+            if (var != null) {

+                varNew = t.getVariable(column.getTabAlias() + "." + column.getInternalName() + operator.toString(),

+                        column.getType());

+                t.replaceVariable(var, varNew);

+                var = varNew;

+            } else

+                var = t.getNewVariable(column.getTabAlias() + "." + column.getInternalName(), column.getType());

 

-			variables.add(var);

-			names.add(column.getInternalName());

-		}

-		Schema currentSchema = new Schema(names, types);

+            variables.add(var);

+            names.add(column.getInternalName());

+        }

+        Schema currentSchema = new Schema(names, types);

 

-		String alias = desc.getAlias();

-		PartitionDesc partDesc = aliasToPathMap.get(alias);

-		IDataSource<PartitionDesc> dataSource = new HiveDataSource<PartitionDesc>(

-				partDesc, currentSchema.getSchema());

-		ILogicalOperator currentOperator = new DataSourceScanOperator(

-				variables, dataSource);

+        String alias = desc.getAlias();

+        PartitionDesc partDesc = aliasToPathMap.get(alias);

+        IDataSource<PartitionDesc> dataSource = new HiveDataSource<PartitionDesc>(partDesc, currentSchema.getSchema());

+        ILogicalOperator currentOperator = new DataSourceScanOperator(variables, dataSource);

 

-		// set empty tuple source operator

-		ILogicalOperator ets = new EmptyTupleSourceOperator();

-		currentOperator.getInputs().add(

-				new MutableObject<ILogicalOperator>(ets));

+        // set empty tuple source operator

+        ILogicalOperator ets = new EmptyTupleSourceOperator();

+        currentOperator.getInputs().add(new MutableObject<ILogicalOperator>(ets));

 

-		// setup data source

-		dataSourceMap.put(partDesc, dataSource);

-		t.rewriteOperatorOutputSchema(variables, operator);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        // setup data source

+        dataSourceMap.put(partDesc, dataSource);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+    @Override

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

 

-		if (hiveOperator.getChildOperators() != null

-				&& hiveOperator.getChildOperators().size() > 0)

-			return null;

+        if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0)

+            return null;

 

-		Schema currentSchema = t.generateInputSchema(hiveOperator

-				.getParentOperators().get(0));

+        Schema currentSchema = t.generateInputSchema(hiveOperator.getParentOperators().get(0));

 

-		IDataSink sink = new HiveDataSink(hiveOperator,

-				currentSchema.getSchema());

-		List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();

-		for (String column : currentSchema.getNames()) {

-			exprList.add(new MutableObject<ILogicalExpression>(

-					new VariableReferenceExpression(t.getVariable(column))));

-		}

+        IDataSink sink = new HiveDataSink(hiveOperator, currentSchema.getSchema());

+        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();

+        for (String column : currentSchema.getNames()) {

+            exprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(t.getVariable(column))));

+        }

 

-		ILogicalOperator currentOperator = new WriteOperator(exprList, sink);

-		if (AlgebricksParentOperator != null) {

-			currentOperator.getInputs().add(AlgebricksParentOperator);

-		}

+        ILogicalOperator currentOperator = new WriteOperator(exprList, sink);

+        if (AlgebricksParentOperator != null) {

+            currentOperator.getInputs().add(AlgebricksParentOperator);

+        }

 

-		IMetadataProvider<PartitionDesc, Object> metaData = new HiveMetaDataProvider<PartitionDesc, Object>(

-				hiveOperator, currentSchema, dataSourceMap);

-		t.setMetadataProvider(metaData);

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        IMetadataProvider<PartitionDesc, Object> metaData = new HiveMetaDataProvider<PartitionDesc, Object>(

+                hiveOperator, currentSchema, dataSourceMap);

+        t.setMetadataProvider(metaData);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 }
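
The table-scan visitor prunes virtual columns by walking the list backwards, so removing an element never shifts an index that is still to be visited. A self-contained sketch; Col is a hypothetical stand-in for Hive's ColumnInfo:

    import java.util.ArrayList;
    import java.util.List;

    class VirtualColumnPruneSketch {
        record Col(String name, boolean virtual) {}

        static void pruneVirtual(List<Col> columns) {
            for (int i = columns.size() - 1; i >= 0; i--)
                if (columns.get(i).virtual())
                    columns.remove(i); // safe: indices below i are untouched
        }

        public static void main(String[] args) {
            List<Col> cols = new ArrayList<>(List.of(
                    new Col("id", false), new Col("ROW__ID", true), new Col("name", false)));
            pruneVirtual(cols);
            System.out.println(cols); // the virtual ROW__ID column is gone
        }
    }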

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
index f4e74f6..96b9463 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
@@ -18,47 +18,45 @@
 

 public class UnionVisitor extends DefaultVisitor {

 

-	List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+    List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

 

-	@Override

-	public Mutable<ILogicalOperator> visit(UnionOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

+    @Override

+    public Mutable<ILogicalOperator> visit(UnionOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

 

-		parents.add(AlgebricksParentOperator);

-		if (operator.getParentOperators().size() > parents.size()) {

-			return null;

-		}

+        parents.add(AlgebricksParentOperator);

+        if (operator.getParentOperators().size() > parents.size()) {

+            return null;

+        }

 

-		List<LogicalVariable> leftVars = new ArrayList<LogicalVariable>();

-		List<LogicalVariable> rightVars = new ArrayList<LogicalVariable>();

+        List<LogicalVariable> leftVars = new ArrayList<LogicalVariable>();

+        List<LogicalVariable> rightVars = new ArrayList<LogicalVariable>();

 

-		VariableUtilities.getUsedVariables(parents.get(0).getValue(), leftVars);

-		VariableUtilities

-				.getUsedVariables(parents.get(1).getValue(), rightVars);

+        VariableUtilities.getUsedVariables(parents.get(0).getValue(), leftVars);

+        VariableUtilities.getUsedVariables(parents.get(1).getValue(), rightVars);

 

-		List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> triples = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();

-		List<LogicalVariable> unionVars = new ArrayList<LogicalVariable>();

+        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> triples = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();

+        List<LogicalVariable> unionVars = new ArrayList<LogicalVariable>();

 

-		for (int i = 0; i < leftVars.size(); i++) {

-			LogicalVariable unionVar = t.getVariable(leftVars.get(i).getId()

-					+ "union" + AlgebricksParentOperator.hashCode(),

-					TypeInfoFactory.unknownTypeInfo);

-			unionVars.add(unionVar);

-			Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(

-					leftVars.get(i), rightVars.get(i), unionVar);

-			t.replaceVariable(leftVars.get(i), unionVar);

-			t.replaceVariable(rightVars.get(i), unionVar);

-			triples.add(triple);

-		}

-		ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator(

-				triples);

-		for (Mutable<ILogicalOperator> parent : parents)

-			currentOperator.getInputs().add(parent);

+        for (int i = 0; i < leftVars.size(); i++) {

+            LogicalVariable unionVar = t.getVariable(

+                    leftVars.get(i).getId() + "union" + AlgebricksParentOperator.hashCode(),

+                    TypeInfoFactory.unknownTypeInfo);

+            unionVars.add(unionVar);

+            Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(

+                    leftVars.get(i), rightVars.get(i), unionVar);

+            t.replaceVariable(leftVars.get(i), unionVar);

+            t.replaceVariable(rightVars.get(i), unionVar);

+            triples.add(triple);

+        }

+        ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator(

+                triples);

+        for (Mutable<ILogicalOperator> parent : parents)

+            currentOperator.getInputs().add(parent);

 

-		t.rewriteOperatorOutputSchema(unionVars, operator);

-		parents.clear();

-		return new MutableObject<ILogicalOperator>(currentOperator);

-	}

+        t.rewriteOperatorOutputSchema(unionVars, operator);

+        parents.clear();

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

 

 }
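
UnionVisitor unifies the two branches positionally: the i-th variable of the left branch and the i-th variable of the right branch are both replaced by one fresh union variable, recorded as the (left, right, union) triple that UnionAllOperator consumes. A sketch with plain strings as stand-ins for LogicalVariable:

    import java.util.ArrayList;
    import java.util.List;

    class UnionTripleSketch {
        record Triple(String left, String right, String union) {}

        static List<Triple> unify(List<String> leftVars, List<String> rightVars) {
            List<Triple> triples = new ArrayList<>();
            for (int i = 0; i < leftVars.size(); i++) {
                String unionVar = leftVars.get(i) + "_union"; // fresh variable per position
                triples.add(new Triple(leftVars.get(i), rightVars.get(i), unionVar));
            }
            return triples;
        }
    }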

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
index 20013e3..d298553 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
@@ -31,136 +31,115 @@
  */

 public class DefaultVisitor implements Visitor {

 

-	@Override

-	public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(

-			LateralViewForwardOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(

-			LateralViewJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public Mutable<ILogicalOperator> visit(UnionOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public Mutable<ILogicalOperator> visit(UnionOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

+        return null;

+    }

 }
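
DefaultVisitor is an adapter: every visit(...) returns null, meaning "not handled", so concrete visitors override only the operators they actually translate. The same pattern in miniature, with hypothetical Node types invented for illustration:

    class VisitorAdapterSketch {
        interface Node {}
        record Limit(int n) implements Node {}
        record Select(String col) implements Node {}

        static class DefaultHandler {
            String visit(Limit op) { return null; }  // null means "this visitor declines"
            String visit(Select op) { return null; }
        }

        static class LimitHandler extends DefaultHandler {
            @Override
            String visit(Limit op) { return "limit(" + op.n() + ")"; } // only LIMIT is handled
        }
    }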

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
index 9165386..d9eb50c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
@@ -20,155 +20,151 @@
 @SuppressWarnings("rawtypes")

 public interface Translator {

 

-	/**

-	 * generate input schema

-	 * 

-	 * @param operator

-	 * @return

-	 */

-	public Schema generateInputSchema(Operator operator);

+    /**

+     * generate input schema

+     * 

+     * @param operator

+     * @return

+     */

+    public Schema generateInputSchema(Operator operator);

 

-	/**

-	 * rewrite the names of output columns for feture expression evaluators to

-	 * use

-	 * 

-	 * @param operator

-	 */

-	public void rewriteOperatorOutputSchema(List<LogicalVariable> vars,

-			Operator operator);

+    /**

+     * rewrite the names of output columns for future expression evaluators to

+     * use

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(List<LogicalVariable> vars, Operator operator);

 

-	/**

-	 * rewrite the names of output columns for feture expression evaluators to

-	 * use

-	 * 

-	 * @param operator

-	 */

-	public void rewriteOperatorOutputSchema(Operator operator);

+    /**

+     * rewrite the names of output columns for future expression evaluators to

+     * use

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(Operator operator);

 

-	/**

-	 * rewrite an expression and substitute variables

-	 * 

-	 * @param expr

-	 *            hive expression

-	 */

-	public void rewriteExpression(ExprNodeDesc expr);

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpression(ExprNodeDesc expr);

 

-	/**

-	 * rewrite an expression and substitute variables

-	 * 

-	 * @param expr

-	 *            hive expression

-	 */

-	public void rewriteExpressionPartial(ExprNodeDesc expr);

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpressionPartial(ExprNodeDesc expr);

 

-	/**

-	 * get an assign operator as a child of parent

-	 * 

-	 * @param parent

-	 * @param cols

-	 * @param variables

-	 * @return

-	 */

-	public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent,

-			List<ExprNodeDesc> cols, ArrayList<LogicalVariable> variables);

+    /**

+     * get an assign operator as a child of parent

+     * 

+     * @param parent

+     * @param cols

+     * @param variables

+     * @return

+     */

+    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,

+            ArrayList<LogicalVariable> variables);

 

-	/**

-	 * get type for a logical variable

-	 * 

-	 * @param var

-	 * @return type info

-	 */

-	public TypeInfo getType(LogicalVariable var);

+    /**

+     * get type for a logical variable

+     * 

+     * @param var

+     * @return type info

+     */

+    public TypeInfo getType(LogicalVariable var);

 

-	/**

-	 * translate an expression from hive to Algebricks

-	 * 

-	 * @param desc

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc desc);

+    /**

+     * translate an expression from hive to Algebricks

+     * 

+     * @param desc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc desc);

 

-	/**

-	 * translate an aggregation from hive to Algebricks

-	 * 

-	 * @param aggregateDesc

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateAggregation(

-			AggregationDesc aggregateDesc);

+    /**

+     * translate an aggregation from hive to Algebricks

+     * 

+     * @param aggregateDesc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc);

 

-	/**

-	 * translate unnesting (UDTF) function expression

-	 * 

-	 * @param aggregator

-	 * @return

-	 */

-	public Mutable<ILogicalExpression> translateUnnestFunction(

-			UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument);

+    /**

+     * translate unnesting (UDTF) function expression

+     * 

+     * @param udtfDesc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument);

 

-	/**

-	 * get variable from a schema

-	 * 

-	 * @param schema

-	 * @return

-	 */

-	public List<LogicalVariable> getVariablesFromSchema(Schema schema);

+    /**

+     * get variables from a schema

+     * 

+     * @param schema

+     * @return

+     */

+    public List<LogicalVariable> getVariablesFromSchema(Schema schema);

 

-	/**

-	 * get variable from name

-	 * 

-	 * @param name

-	 * @return

-	 */

-	public LogicalVariable getVariable(String name);

+    /**

+     * get variable from name

+     * 

+     * @param name

+     * @return

+     */

+    public LogicalVariable getVariable(String name);

 

-	/**

-	 * get variable from field name

-	 * 

-	 * @param name

-	 * @return

-	 */

-	public LogicalVariable getVariableFromFieldName(String name);

+    /**

+     * get variable from field name

+     * 

+     * @param name

+     * @return

+     */

+    public LogicalVariable getVariableFromFieldName(String name);

 

-	/**

-	 * get variable from name, type

-	 * 

-	 * @param fieldName

-	 * @param type

-	 * @return

-	 */

-	public LogicalVariable getVariable(String fieldName, TypeInfo type);

+    /**

+     * get variable from name, type

+     * 

+     * @param fieldName

+     * @param type

+     * @return

+     */

+    public LogicalVariable getVariable(String fieldName, TypeInfo type);

 

-	/**

-	 * get new variable from name, type

-	 * 

-	 * @param fieldName

-	 * @param type

-	 * @return

-	 */

-	public LogicalVariable getNewVariable(String fieldName, TypeInfo type);

+    /**

+     * get new variable from name, type

+     * 

+     * @param fieldName

+     * @param type

+     * @return

+     */

+    public LogicalVariable getNewVariable(String fieldName, TypeInfo type);

 

-	/**

-	 * set the metadata provider

-	 * 

-	 * @param metadata

-	 */

-	public void setMetadataProvider(

-			IMetadataProvider<PartitionDesc, Object> metadata);

+    /**

+     * set the metadata provider

+     * 

+     * @param metadata

+     */

+    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata);

 

-	/**

-	 * get the metadata provider

-	 * 

-	 * @param metadata

-	 */

-	public IMetadataProvider<PartitionDesc, Object> getMetadataProvider();

+    /**

+     * get the metadata provider

+     * 

+     * @return the metadata provider

+     */

+    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider();

 

-	/**

-	 * replace the variable

-	 * 

-	 * @param oldVar

-	 * @param newVar

-	 */

-	public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar);

+    /**

+     * replace the variable

+     * 

+     * @param oldVar

+     * @param newVar

+     */

+    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar);

 

 }
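
The visitors earlier in this patch use this interface with a consistent calling convention: rewrite the expressions, request an assign operator (which may be null), splice it in as the new parent if present, then rewrite the operator's output schema. A simplified usage sketch; MiniTranslator, Expr, and Var are invented for illustration and are not part of hivesterix:

    import java.util.ArrayList;
    import java.util.List;

    class TranslatorUsageSketch {
        record Expr(String text) {}
        record Var(String name) {}

        interface MiniTranslator {
            void rewriteExpression(Expr e);
            String getAssignOperator(String parent, List<Expr> cols, List<Var> outVars);
        }

        static String translateProject(MiniTranslator t, String parent, List<Expr> cols) {
            for (Expr e : cols)
                t.rewriteExpression(e);  // substitute variables first
            List<Var> vars = new ArrayList<>();
            String assign = t.getAssignOperator(parent, cols, vars);
            if (assign != null)
                parent = assign;         // the assign becomes the new parent
            return "project(" + vars + ") <- " + parent;
        }
    }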

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
index 745f93e..11ae357 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
@@ -26,81 +26,60 @@
 

 public interface Visitor {

 

-	public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(

-			LateralViewForwardOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(LateralViewForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(

-			LateralViewJoinOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 

-	public Mutable<ILogicalOperator> visit(UnionOperator operator,

-			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

-			throws AlgebricksException;

+    public Mutable<ILogicalOperator> visit(UnionOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

 }
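
A plausible driver for this interface, inferred only from the return-null-to-decline contract visible above (the actual plan walker is not part of this patch): offer the current Hive operator to each visitor and keep the first non-null translation.

    import java.util.List;

    class VisitorDispatchSketch {
        interface Op {}
        interface MiniVisitor { String visit(Op op, String parentRef); } // null = declined

        static String dispatch(List<MiniVisitor> visitors, Op op, String parentRef) {
            for (MiniVisitor v : visitors) {
                String translated = v.visit(op, parentRef);
                if (translated != null)
                    return translated; // first visitor that handles the operator wins
            }
            return parentRef; // nobody handled it: keep the current parent
        }
    }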

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
index 4ebea0a..7e4e271 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
@@ -37,78 +37,77 @@
 
 public final class HiveRuleCollections {
 
-	public final static LinkedList<IAlgebraicRewriteRule> NORMALIZATION = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		NORMALIZATION.add(new EliminateSubplanRule());
-		NORMALIZATION.add(new IntroduceAggregateCombinerRule());
-		NORMALIZATION.add(new BreakSelectIntoConjunctsRule());
-		NORMALIZATION.add(new IntroduceAggregateCombinerRule());
-		NORMALIZATION.add(new PushSelectIntoJoinRule());
-		NORMALIZATION.add(new ExtractGbyExpressionsRule());
-		NORMALIZATION.add(new RemoveRedundantSelectRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> NORMALIZATION = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        NORMALIZATION.add(new EliminateSubplanRule());
+        NORMALIZATION.add(new IntroduceAggregateCombinerRule());
+        NORMALIZATION.add(new BreakSelectIntoConjunctsRule());
+        NORMALIZATION.add(new IntroduceAggregateCombinerRule());
+        NORMALIZATION.add(new PushSelectIntoJoinRule());
+        NORMALIZATION.add(new ExtractGbyExpressionsRule());
+        NORMALIZATION.add(new RemoveRedundantSelectRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> COND_PUSHDOWN_AND_JOIN_INFERENCE = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new PushSelectDownRule());
-		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new InlineVariablesRule());
-		COND_PUSHDOWN_AND_JOIN_INFERENCE
-				.add(new FactorRedundantGroupAndDecorVarsRule());
-		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new EliminateSubplanRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> COND_PUSHDOWN_AND_JOIN_INFERENCE = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new PushSelectDownRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new InlineVariablesRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new FactorRedundantGroupAndDecorVarsRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new EliminateSubplanRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> LOAD_FIELDS = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		// should LoadRecordFieldsRule be applied in only one pass over the
-		// plan?
-		LOAD_FIELDS.add(new InlineVariablesRule());
-		// LOAD_FIELDS.add(new RemoveUnusedAssignAndAggregateRule());
-		LOAD_FIELDS.add(new ComplexJoinInferenceRule());
-		LOAD_FIELDS.add(new InferTypesRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> LOAD_FIELDS = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        // should LoadRecordFieldsRule be applied in only one pass over the
+        // plan?
+        LOAD_FIELDS.add(new InlineVariablesRule());
+        // LOAD_FIELDS.add(new RemoveUnusedAssignAndAggregateRule());
+        LOAD_FIELDS.add(new ComplexJoinInferenceRule());
+        LOAD_FIELDS.add(new InferTypesRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> OP_PUSHDOWN = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		OP_PUSHDOWN.add(new PushProjectDownRule());
-		OP_PUSHDOWN.add(new PushSelectDownRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> OP_PUSHDOWN = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        OP_PUSHDOWN.add(new PushProjectDownRule());
+        OP_PUSHDOWN.add(new PushSelectDownRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> DATA_EXCHANGE = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		DATA_EXCHANGE.add(new SetExecutionModeRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> DATA_EXCHANGE = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        DATA_EXCHANGE.add(new SetExecutionModeRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> CONSOLIDATION = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		CONSOLIDATION.add(new RemoveRedundantProjectionRule());
-		CONSOLIDATION.add(new ConsolidateSelectsRule());
-		CONSOLIDATION.add(new IntroduceEarlyProjectRule());
-		CONSOLIDATION.add(new ConsolidateAssignsRule());
-		CONSOLIDATION.add(new IntroduceGroupByCombinerRule());
-		CONSOLIDATION.add(new RemoveUnusedAssignAndAggregateRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> CONSOLIDATION = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        CONSOLIDATION.add(new RemoveRedundantProjectionRule());
+        CONSOLIDATION.add(new ConsolidateSelectsRule());
+        CONSOLIDATION.add(new IntroduceEarlyProjectRule());
+        CONSOLIDATION.add(new ConsolidateAssignsRule());
+        CONSOLIDATION.add(new IntroduceGroupByCombinerRule());
+        CONSOLIDATION.add(new RemoveUnusedAssignAndAggregateRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> PHYSICAL_PLAN_REWRITES = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		PHYSICAL_PLAN_REWRITES.add(new PullSelectOutOfEqJoin());
-		PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
-		PHYSICAL_PLAN_REWRITES.add(new EnforceStructuralPropertiesRule());
-		PHYSICAL_PLAN_REWRITES.add(new PushProjectDownRule());
-		PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
-		PHYSICAL_PLAN_REWRITES.add(new PushLimitDownRule());
-		PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeWriteRule());
-		PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeUnionRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> PHYSICAL_PLAN_REWRITES = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        PHYSICAL_PLAN_REWRITES.add(new PullSelectOutOfEqJoin());
+        PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
+        PHYSICAL_PLAN_REWRITES.add(new EnforceStructuralPropertiesRule());
+        PHYSICAL_PLAN_REWRITES.add(new PushProjectDownRule());
+        PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
+        PHYSICAL_PLAN_REWRITES.add(new PushLimitDownRule());
+        PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeWriteRule());
+        PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeUnionRule());
+    }
 
-	public final static LinkedList<IAlgebraicRewriteRule> prepareJobGenRules = new LinkedList<IAlgebraicRewriteRule>();
-	static {
-		prepareJobGenRules.add(new ReinferAllTypesRule());
-		prepareJobGenRules.add(new IsolateHyracksOperatorsRule(
-				HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
-		prepareJobGenRules.add(new ExtractCommonOperatorsRule());
-		prepareJobGenRules.add(new LocalGroupByRule());
-		prepareJobGenRules.add(new PushProjectIntoDataSourceScanRule());
-		prepareJobGenRules.add(new ReinferAllTypesRule());
-	}
+    public final static LinkedList<IAlgebraicRewriteRule> prepareJobGenRules = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        prepareJobGenRules.add(new ReinferAllTypesRule());
+        prepareJobGenRules.add(new IsolateHyracksOperatorsRule(
+                HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
+        prepareJobGenRules.add(new ExtractCommonOperatorsRule());
+        prepareJobGenRules.add(new LocalGroupByRule());
+        prepareJobGenRules.add(new PushProjectIntoDataSourceScanRule());
+        prepareJobGenRules.add(new ReinferAllTypesRule());
+    }
 
 }
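
For context, each collection above is an ordered batch of rewrite rules; a driver is expected to run one batch to fixpoint before moving to the next. A sketch of such a loop; the rootRef and context arguments are assumed to come from the caller and are not defined in this file:

    // Assumed driver sketch, not part of the commit: apply one batch until no rule fires.
    // AlgebricksException from rewritePre/rewritePost propagates to the caller.
    boolean changed;
    do {
        changed = false;
        for (IAlgebraicRewriteRule rule : HiveRuleCollections.NORMALIZATION) {
            changed |= rule.rewritePre(rootRef, context);
            changed |= rule.rewritePost(rootRef, context);
        }
    } while (changed);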

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
index c58982e..90777ee 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
@@ -21,65 +21,59 @@
 
 public class InsertProjectBeforeWriteRule implements IAlgebraicRewriteRule {
 
-	@Override
-	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) {
-		return false;
-	}
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
 
-	/**
-	 * When the input schema to WriteOperator is different from the output
-	 * schema in terms of variable order, add a project operator to get the
-	 * write order
-	 */
-	@Override
-	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-		if (op.getOperatorTag() != LogicalOperatorTag.WRITE) {
-			return false;
-		}
-		WriteOperator opWrite = (WriteOperator) op;
-		ArrayList<LogicalVariable> finalSchema = new ArrayList<LogicalVariable>();
-		VariableUtilities.getUsedVariables(opWrite, finalSchema);
-		ArrayList<LogicalVariable> inputSchema = new ArrayList<LogicalVariable>();
-		VariableUtilities.getLiveVariables(opWrite, inputSchema);
-		if (!isIdentical(finalSchema, inputSchema)) {
-			ProjectOperator projectOp = new ProjectOperator(finalSchema);
-			Mutable<ILogicalOperator> parentOpRef = opWrite.getInputs().get(0);
-			projectOp.getInputs().add(parentOpRef);
-			opWrite.getInputs().clear();
-			opWrite.getInputs().add(
-					new MutableObject<ILogicalOperator>(projectOp));
-			projectOp.setPhysicalOperator(new StreamProjectPOperator());
-			projectOp.setExecutionMode(ExecutionMode.PARTITIONED);
+    /**
+     * When the input schema to WriteOperator is different from the output
+     * schema in terms of variable order, add a project operator to get the
+     * write order
+     */
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.WRITE) {
+            return false;
+        }
+        WriteOperator opWrite = (WriteOperator) op;
+        ArrayList<LogicalVariable> finalSchema = new ArrayList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(opWrite, finalSchema);
+        ArrayList<LogicalVariable> inputSchema = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(opWrite, inputSchema);
+        if (!isIdentical(finalSchema, inputSchema)) {
+            ProjectOperator projectOp = new ProjectOperator(finalSchema);
+            Mutable<ILogicalOperator> parentOpRef = opWrite.getInputs().get(0);
+            projectOp.getInputs().add(parentOpRef);
+            opWrite.getInputs().clear();
+            opWrite.getInputs().add(new MutableObject<ILogicalOperator>(projectOp));
+            projectOp.setPhysicalOperator(new StreamProjectPOperator());
+            projectOp.setExecutionMode(ExecutionMode.PARTITIONED);
 
-			AbstractLogicalOperator op2 = (AbstractLogicalOperator) parentOpRef
-					.getValue();
-			if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT) {
-				ProjectOperator pi2 = (ProjectOperator) op2;
-				parentOpRef.setValue(pi2.getInputs().get(0).getValue());
-			}
-			context.computeAndSetTypeEnvironmentForOperator(projectOp);
-			return true;
-		} else
-			return false;
+            AbstractLogicalOperator op2 = (AbstractLogicalOperator) parentOpRef.getValue();
+            if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+                ProjectOperator pi2 = (ProjectOperator) op2;
+                parentOpRef.setValue(pi2.getInputs().get(0).getValue());
+            }
+            context.computeAndSetTypeEnvironmentForOperator(projectOp);
+            return true;
+        } else
+            return false;
 
-	}
+    }
 
-	private boolean isIdentical(List<LogicalVariable> finalSchema,
-			List<LogicalVariable> inputSchema) {
-		int finalSchemaSize = finalSchema.size();
-		int inputSchemaSize = inputSchema.size();
-		if (finalSchemaSize != inputSchemaSize)
-			throw new IllegalStateException(
-					"final output schema variables missing!");
-		for (int i = 0; i < finalSchemaSize; i++) {
-			LogicalVariable var1 = finalSchema.get(i);
-			LogicalVariable var2 = inputSchema.get(i);
-			if (!var1.equals(var2))
-				return false;
-		}
-		return true;
-	}
+    private boolean isIdentical(List<LogicalVariable> finalSchema, List<LogicalVariable> inputSchema) {
+        int finalSchemaSize = finalSchema.size();
+        int inputSchemaSize = inputSchema.size();
+        if (finalSchemaSize != inputSchemaSize)
+            throw new IllegalStateException("final output schema variables missing!");
+        for (int i = 0; i < finalSchemaSize; i++) {
+            LogicalVariable var1 = finalSchema.get(i);
+            LogicalVariable var2 = inputSchema.get(i);
+            if (!var1.equals(var2))
+                return false;
+        }
+        return true;
+    }
 }
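
The net effect of the rule, shown schematically on a plan whose input delivers live order [v1, v2] but whose write order is [v2, v1] (variable names are placeholders):

    before:  WRITE [v2, v1] <- ... (live order [v1, v2])
    after:   WRITE [v2, v1] <- PROJECT [v2, v1] <- ... (live order [v1, v2])

The inserted PROJECT gets a StreamProjectPOperator, and an immediately preceding PROJECT is bypassed so the rule does not stack projections.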
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
index 2bebe81..0a18629 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
@@ -20,58 +20,54 @@
 
 public class IntroduceEarlyProjectRule implements IAlgebraicRewriteRule {
 
-	@Override
-	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		return false;
-	}
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
 
-	@Override
-	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-		if (op.getOperatorTag() != LogicalOperatorTag.PROJECT) {
-			return false;
-		}
-		AbstractLogicalOperator middleOp = (AbstractLogicalOperator) op
-				.getInputs().get(0).getValue();
-		List<LogicalVariable> deliveredVars = new ArrayList<LogicalVariable>();
-		List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
-		List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.PROJECT) {
+            return false;
+        }
+        AbstractLogicalOperator middleOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        List<LogicalVariable> deliveredVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
 
-		VariableUtilities.getUsedVariables(op, deliveredVars);
-		VariableUtilities.getUsedVariables(middleOp, usedVars);
-		VariableUtilities.getProducedVariables(middleOp, producedVars);
+        VariableUtilities.getUsedVariables(op, deliveredVars);
+        VariableUtilities.getUsedVariables(middleOp, usedVars);
+        VariableUtilities.getProducedVariables(middleOp, producedVars);
 
-		Set<LogicalVariable> requiredVariables = new HashSet<LogicalVariable>();
-		requiredVariables.addAll(deliveredVars);
-		requiredVariables.addAll(usedVars);
-		requiredVariables.removeAll(producedVars);
+        Set<LogicalVariable> requiredVariables = new HashSet<LogicalVariable>();
+        requiredVariables.addAll(deliveredVars);
+        requiredVariables.addAll(usedVars);
+        requiredVariables.removeAll(producedVars);
 
-		if (middleOp.getInputs().size() <= 0 || middleOp.getInputs().size() > 1)
-			return false;
+        if (middleOp.getInputs().size() <= 0 || middleOp.getInputs().size() > 1)
+            return false;
 
-		AbstractLogicalOperator targetOp = (AbstractLogicalOperator) middleOp
-				.getInputs().get(0).getValue();
-		if (targetOp.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
-			return false;
+        AbstractLogicalOperator targetOp = (AbstractLogicalOperator) middleOp.getInputs().get(0).getValue();
+        if (targetOp.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
+            return false;
 
-		Set<LogicalVariable> deliveredEarlyVars = new HashSet<LogicalVariable>();
-		VariableUtilities.getLiveVariables(targetOp, deliveredEarlyVars);
+        Set<LogicalVariable> deliveredEarlyVars = new HashSet<LogicalVariable>();
+        VariableUtilities.getLiveVariables(targetOp, deliveredEarlyVars);
 
-		deliveredEarlyVars.removeAll(requiredVariables);
-		if (deliveredEarlyVars.size() > 0) {
-			ArrayList<LogicalVariable> requiredVars = new ArrayList<LogicalVariable>();
-			requiredVars.addAll(requiredVariables);
-			ILogicalOperator earlyProjectOp = new ProjectOperator(requiredVars);
-			Mutable<ILogicalOperator> earlyProjectOpRef = new MutableObject<ILogicalOperator>(
-					earlyProjectOp);
-			Mutable<ILogicalOperator> targetRef = middleOp.getInputs().get(0);
-			middleOp.getInputs().set(0, earlyProjectOpRef);
-			earlyProjectOp.getInputs().add(targetRef);
-			context.computeAndSetTypeEnvironmentForOperator(earlyProjectOp);
-			return true;
-		}
-		return false;
-	}
+        deliveredEarlyVars.removeAll(requiredVariables);
+        if (deliveredEarlyVars.size() > 0) {
+            ArrayList<LogicalVariable> requiredVars = new ArrayList<LogicalVariable>();
+            requiredVars.addAll(requiredVariables);
+            ILogicalOperator earlyProjectOp = new ProjectOperator(requiredVars);
+            Mutable<ILogicalOperator> earlyProjectOpRef = new MutableObject<ILogicalOperator>(earlyProjectOp);
+            Mutable<ILogicalOperator> targetRef = middleOp.getInputs().get(0);
+            middleOp.getInputs().set(0, earlyProjectOpRef);
+            earlyProjectOp.getInputs().add(targetRef);
+            context.computeAndSetTypeEnvironmentForOperator(earlyProjectOp);
+            return true;
+        }
+        return false;
+    }
 }
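
Schematically, the rule narrows a data-source scan's output to the variables the plan actually needs, under the checks above (single-input middle operator, scan directly below it); the variable names are placeholders:

    before:  PROJECT [a] <- SELECT (a > 0) <- DATASOURCESCAN [a, b, c]
    after:   PROJECT [a] <- SELECT (a > 0) <- PROJECT [a] <- DATASOURCESCAN [a, b, c]

The required set is computed as (delivered + used) - produced, so variables the scan delivers but nobody consumes are cut off as early as possible.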
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
index 72cbe21..90ca008 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
@@ -17,55 +17,50 @@
 
 public class LocalGroupByRule implements IAlgebraicRewriteRule {
 
-	@Override
-	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		return false;
-	}
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
 
-	@Override
-	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-		if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
-			return false;
-		}
-		Boolean localGby = (Boolean) op.getAnnotations().get(
-				HiveOperatorAnnotations.LOCAL_GROUP_BY);
-		if (localGby != null && localGby.equals(Boolean.TRUE)) {
-			Boolean hashGby = (Boolean) op.getAnnotations().get(
-					OperatorAnnotations.USE_HASH_GROUP_BY);
-			Boolean externalGby = (Boolean) op.getAnnotations().get(
-					OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
-			if ((hashGby != null && (hashGby.equals(Boolean.TRUE)) || (externalGby != null && externalGby
-					.equals(Boolean.TRUE)))) {
-				reviseExchange(op);
-			} else {
-				ILogicalOperator child = op.getInputs().get(0).getValue();
-				AbstractLogicalOperator childOp = (AbstractLogicalOperator) child;
-				while (child.getInputs().size() > 0) {
-					if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
-						break;
-					else {
-						child = child.getInputs().get(0).getValue();
-						childOp = (AbstractLogicalOperator) child;
-					}
-				}
-				if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
-					reviseExchange(childOp);
-			}
-			return true;
-		}
-		return false;
-	}
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        Boolean localGby = (Boolean) op.getAnnotations().get(HiveOperatorAnnotations.LOCAL_GROUP_BY);
+        if (localGby != null && localGby.equals(Boolean.TRUE)) {
+            Boolean hashGby = (Boolean) op.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY);
+            Boolean externalGby = (Boolean) op.getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
+            if ((hashGby != null && (hashGby.equals(Boolean.TRUE)) || (externalGby != null && externalGby
+                    .equals(Boolean.TRUE)))) {
+                reviseExchange(op);
+            } else {
+                ILogicalOperator child = op.getInputs().get(0).getValue();
+                AbstractLogicalOperator childOp = (AbstractLogicalOperator) child;
+                while (child.getInputs().size() > 0) {
+                    if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+                        break;
+                    else {
+                        child = child.getInputs().get(0).getValue();
+                        childOp = (AbstractLogicalOperator) child;
+                    }
+                }
+                if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+                    reviseExchange(childOp);
+            }
+            return true;
+        }
+        return false;
+    }
 
-	private void reviseExchange(AbstractLogicalOperator op) {
-		ExchangeOperator exchange = (ExchangeOperator) op.getInputs().get(0)
-				.getValue();
-		IPhysicalOperator physicalOp = exchange.getPhysicalOperator();
-		if (physicalOp.getOperatorTag() == PhysicalOperatorTag.HASH_PARTITION_EXCHANGE) {
-			exchange.setPhysicalOperator(new OneToOneExchangePOperator());
-		}
-	}
+    private void reviseExchange(AbstractLogicalOperator op) {
+        ExchangeOperator exchange = (ExchangeOperator) op.getInputs().get(0).getValue();
+        IPhysicalOperator physicalOp = exchange.getPhysicalOperator();
+        if (physicalOp.getOperatorTag() == PhysicalOperatorTag.HASH_PARTITION_EXCHANGE) {
+            exchange.setPhysicalOperator(new OneToOneExchangePOperator());
+        }
+    }
 
 }
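
Schematically, when a GROUP operator carries the LOCAL_GROUP_BY annotation, the hash-partition exchange feeding it (or feeding the ORDER operator below it, in the sort-based case) is downgraded so no repartitioning happens:

    before:  GROUP <- EXCHANGE (hash-partition) <- ...
    after:   GROUP <- EXCHANGE (one-to-one) <- ...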
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
index 9958ba8..44ff12d 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
@@ -13,34 +13,32 @@
 
 public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
 
-	@Override
-	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		return false;
-	}
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
 
-	@Override
-	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
-			IOptimizationContext context) throws AlgebricksException {
-		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-		if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-			return false;
-		}
-		AbstractLogicalOperator inputOp = (AbstractLogicalOperator) op
-				.getInputs().get(0).getValue();
-		if (inputOp.getOperatorTag() != LogicalOperatorTag.SELECT) {
-			return false;
-		}
-		SelectOperator selectOp = (SelectOperator) op;
-		SelectOperator inputSelectOp = (SelectOperator) inputOp;
-		ILogicalExpression expr1 = selectOp.getCondition().getValue();
-		ILogicalExpression expr2 = inputSelectOp.getCondition().getValue();
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        AbstractLogicalOperator inputOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        if (inputOp.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator selectOp = (SelectOperator) op;
+        SelectOperator inputSelectOp = (SelectOperator) inputOp;
+        ILogicalExpression expr1 = selectOp.getCondition().getValue();
+        ILogicalExpression expr2 = inputSelectOp.getCondition().getValue();
 
-		if (expr1.equals(expr2)) {
-			selectOp.getInputs().set(0, inputSelectOp.getInputs().get(0));
-			return true;
-		}
-		return false;
-	}
+        if (expr1.equals(expr2)) {
+            selectOp.getInputs().set(0, inputSelectOp.getInputs().get(0));
+            return true;
+        }
+        return false;
+    }
 
 }
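
Schematically, two stacked SELECTs with equal conditions collapse into one:

    before:  SELECT (cond) <- SELECT (cond) <- ...
    after:   SELECT (cond) <- ...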
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java
index 6b4d697..9ed1558 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java
@@ -22,123 +22,118 @@
 @SuppressWarnings({ "rawtypes", "deprecation" })
 public class ConfUtil {
 
-	private static JobConf job;
-	private static HiveConf hconf;
-	private static String[] NCs;
-	private static Map<String, List<String>> ncMapping;
-	private static IHyracksClientConnection hcc = null;
-	private static ClusterTopology topology = null;
+    private static JobConf job;
+    private static HiveConf hconf;
+    private static String[] NCs;
+    private static Map<String, List<String>> ncMapping;
+    private static IHyracksClientConnection hcc = null;
+    private static ClusterTopology topology = null;
 
-	public static JobConf getJobConf(Class<? extends InputFormat> format,
-			Path path) {
-		JobConf conf = new JobConf();
-		if (job != null)
-			conf = job;
+    public static JobConf getJobConf(Class<? extends InputFormat> format, Path path) {
+        JobConf conf = new JobConf();
+        if (job != null)
+            conf = job;
 
-		String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
-		Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
-		conf.addResource(pathCore);
-		Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
-		conf.addResource(pathMapRed);
-		Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
-		conf.addResource(pathHDFS);
+        String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
+        Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
+        conf.addResource(pathCore);
+        Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
+        conf.addResource(pathMapRed);
+        Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
+        conf.addResource(pathHDFS);
 
-		conf.setInputFormat(format);
-		FileInputFormat.setInputPaths(conf, path);
-		return conf;
-	}
+        conf.setInputFormat(format);
+        FileInputFormat.setInputPaths(conf, path);
+        return conf;
+    }
 
-	public static JobConf getJobConf() {
-		JobConf conf = new JobConf();
-		if (job != null)
-			conf = job;
+    public static JobConf getJobConf() {
+        JobConf conf = new JobConf();
+        if (job != null)
+            conf = job;
 
-		String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
-		Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
-		conf.addResource(pathCore);
-		Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
-		conf.addResource(pathMapRed);
-		Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
-		conf.addResource(pathHDFS);
+        String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
+        Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
+        conf.addResource(pathCore);
+        Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
+        conf.addResource(pathMapRed);
+        Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
+        conf.addResource(pathHDFS);
 
-		return conf;
-	}
+        return conf;
+    }
 
-	public static void setJobConf(JobConf conf) {
-		job = conf;
-	}
+    public static void setJobConf(JobConf conf) {
+        job = conf;
+    }
 
-	public static void setHiveConf(HiveConf hiveConf) {
-		hconf = hiveConf;
-	}
+    public static void setHiveConf(HiveConf hiveConf) {
+        hconf = hiveConf;
+    }
 
-	public static HiveConf getHiveConf() {
-		if (hconf == null) {
-			hconf = new HiveConf(SessionState.class);
-			hconf.addResource(new Path("conf/hive-default.xml"));
-		}
-		return hconf;
-	}
+    public static HiveConf getHiveConf() {
+        if (hconf == null) {
+            hconf = new HiveConf(SessionState.class);
+            hconf.addResource(new Path("conf/hive-default.xml"));
+        }
+        return hconf;
+    }
 
-	public static String[] getNCs() throws AlgebricksException {
-		if (NCs == null) {
-			try {
-				loadClusterConfig();
-			} catch (Exception e) {
-				throw new AlgebricksException(e);
-			}
-		}
-		return NCs;
-	}
+    public static String[] getNCs() throws AlgebricksException {
+        if (NCs == null) {
+            try {
+                loadClusterConfig();
+            } catch (Exception e) {
+                throw new AlgebricksException(e);
+            }
+        }
+        return NCs;
+    }
 
-	public static Map<String, List<String>> getNCMapping()
-			throws AlgebricksException {
-		if (ncMapping == null) {
-			try {
-				loadClusterConfig();
-			} catch (Exception e) {
-				throw new AlgebricksException(e);
-			}
-		}
-		return ncMapping;
-	}
+    public static Map<String, List<String>> getNCMapping() throws AlgebricksException {
+        if (ncMapping == null) {
+            try {
+                loadClusterConfig();
+            } catch (Exception e) {
+                throw new AlgebricksException(e);
+            }
+        }
+        return ncMapping;
+    }
 
-	private static void loadClusterConfig() {
-		try {
-			getHiveConf();
-			String ipAddress = hconf.get("hive.hyracks.host");
-			int port = Integer.parseInt(hconf.get("hive.hyracks.port"));
-			int mpl = Integer.parseInt(hconf.get("hive.hyracks.parrallelism"));
-			hcc = new HyracksConnection(ipAddress, port);
-			topology = hcc.getClusterTopology();
-			Map<String, NodeControllerInfo> ncNameToNcInfos = hcc
-					.getNodeControllerInfos();
-			NCs = new String[ncNameToNcInfos.size() * mpl];
-			ncMapping = new HashMap<String, List<String>>();
-			int i = 0;
-			for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos
-					.entrySet()) {
-				String ipAddr = InetAddress.getByAddress(
-						entry.getValue().getNetworkAddress().getIpAddress())
-						.getHostAddress();
-				List<String> matchedNCs = ncMapping.get(ipAddr);
-				if (matchedNCs == null) {
-					matchedNCs = new ArrayList<String>();
-					ncMapping.put(ipAddr, matchedNCs);
-				}
-				matchedNCs.add(entry.getKey());
-				for (int j = i * mpl; j < i * mpl + mpl; j++)
-					NCs[j] = entry.getKey();
-				i++;
-			}
-		} catch (Exception e) {
-			throw new IllegalStateException(e);
-		}
-	}
+    private static void loadClusterConfig() {
+        try {
+            getHiveConf();
+            String ipAddress = hconf.get("hive.hyracks.host");
+            int port = Integer.parseInt(hconf.get("hive.hyracks.port"));
+            int mpl = Integer.parseInt(hconf.get("hive.hyracks.parrallelism"));
+            hcc = new HyracksConnection(ipAddress, port);
+            topology = hcc.getClusterTopology();
+            Map<String, NodeControllerInfo> ncNameToNcInfos = hcc.getNodeControllerInfos();
+            NCs = new String[ncNameToNcInfos.size() * mpl];
+            ncMapping = new HashMap<String, List<String>>();
+            int i = 0;
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ncMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ncMapping.put(ipAddr, matchedNCs);
+                }
+                matchedNCs.add(entry.getKey());
+                for (int j = i * mpl; j < i * mpl + mpl; j++)
+                    NCs[j] = entry.getKey();
+                i++;
+            }
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
 
-	public static ClusterTopology getClusterTopology() {
-		if (topology == null)
-			loadClusterConfig();
-		return topology;
-	}
+    public static ClusterTopology getClusterTopology() {
+        if (topology == null)
+            loadClusterConfig();
+        return topology;
+    }
 }
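
loadClusterConfig above reaches the Hyracks cluster controller through three HiveConf properties. A sketch of setting them programmatically before using ConfUtil, with placeholder values; note the key hive.hyracks.parrallelism is spelled exactly as the code reads it:

    // Illustrative setup only; host, port, and parallelism values are placeholders.
    HiveConf hiveConf = new HiveConf(SessionState.class);
    hiveConf.set("hive.hyracks.host", "127.0.0.1");
    hiveConf.set("hive.hyracks.port", "1098");
    hiveConf.set("hive.hyracks.parrallelism", "4"); // entries per node controller in NCs[]
    ConfUtil.setHiveConf(hiveConf);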

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
index 8f6d9ca..ad02239 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
@@ -22,153 +22,148 @@
 
 public abstract class AbstractExpressionEvaluator implements ICopyEvaluator {
 
-	private List<ICopyEvaluator> children;
+    private List<ICopyEvaluator> children;
 
-	private ExprNodeEvaluator evaluator;
+    private ExprNodeEvaluator evaluator;
 
-	private IDataOutputProvider out;
+    private IDataOutputProvider out;
 
-	private ObjectInspector inspector;
+    private ObjectInspector inspector;
 
-	/**
-	 * output object inspector
-	 */
-	private ObjectInspector outputInspector;
+    /**
+     * output object inspector
+     */
+    private ObjectInspector outputInspector;
 
-	/**
-	 * cached row object
-	 */
-	private LazyObject<? extends ObjectInspector> cachedRowObject;
+    /**
+     * cached row object
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
 
-	/**
-	 * serializer/derialzer for lazy object
-	 */
-	private SerDe lazySer;
+    /**
+     * serializer/deserializer for lazy object
+     */
+    private SerDe lazySer;
 
-	/**
-	 * data output
-	 */
-	DataOutput dataOutput;
+    /**
+     * data output
+     */
+    DataOutput dataOutput;
 
-	public AbstractExpressionEvaluator(ExprNodeEvaluator hiveEvaluator,
-			ObjectInspector oi, IDataOutputProvider output)
-			throws AlgebricksException {
-		evaluator = hiveEvaluator;
-		out = output;
-		inspector = oi;
-		dataOutput = out.getDataOutput();
-	}
+    public AbstractExpressionEvaluator(ExprNodeEvaluator hiveEvaluator, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        evaluator = hiveEvaluator;
+        out = output;
+        inspector = oi;
+        dataOutput = out.getDataOutput();
+    }
 
-	protected ObjectInspector getRowInspector() {
-		return null;
-	}
+    protected ObjectInspector getRowInspector() {
+        return null;
+    }
 
-	protected IDataOutputProvider getIDataOutputProvider() {
-		return out;
-	}
+    protected IDataOutputProvider getIDataOutputProvider() {
+        return out;
+    }
 
-	protected ExprNodeEvaluator getHiveEvaluator() {
-		return evaluator;
-	}
+    protected ExprNodeEvaluator getHiveEvaluator() {
+        return evaluator;
+    }
 
-	public ObjectInspector getObjectInspector() {
-		return inspector;
-	}
+    public ObjectInspector getObjectInspector() {
+        return inspector;
+    }
 
-	@Override
-	public void evaluate(IFrameTupleReference r) throws AlgebricksException {
-		// initialize hive evaluator
-		try {
-			if (outputInspector == null)
-				outputInspector = evaluator.initialize(inspector);
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e.getMessage());
-		}
+    @Override
+    public void evaluate(IFrameTupleReference r) throws AlgebricksException {
+        // initialize hive evaluator
+        try {
+            if (outputInspector == null)
+                outputInspector = evaluator.initialize(inspector);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
 
-		readIntoCache(r);
-		try {
-			Object result = evaluator.evaluate(cachedRowObject);
+        readIntoCache(r);
+        try {
+            Object result = evaluator.evaluate(cachedRowObject);
 
-			// if (result == null) {
-			// result = evaluator.evaluate(cachedRowObject);
-			//
-			// // check if result is null
-			//
-			// String errorMsg = "serialize null object in  \n output " +
-			// outputInspector.toString() + " \n input "
-			// + inspector.toString() + "\n ";
-			// errorMsg += "";
-			// List<Object> columns = ((StructObjectInspector)
-			// inspector).getStructFieldsDataAsList(cachedRowObject);
-			// for (Object column : columns) {
-			// errorMsg += column.toString() + " ";
-			// }
-			// errorMsg += "\n";
-			// Log.info(errorMsg);
-			// System.out.println(errorMsg);
-			// // result = new BooleanWritable(true);
-			// throw new IllegalStateException(errorMsg);
-			// }
+            // if (result == null) {
+            // result = evaluator.evaluate(cachedRowObject);
+            //
+            // // check if result is null
+            //
+            // String errorMsg = "serialize null object in  \n output " +
+            // outputInspector.toString() + " \n input "
+            // + inspector.toString() + "\n ";
+            // errorMsg += "";
+            // List<Object> columns = ((StructObjectInspector)
+            // inspector).getStructFieldsDataAsList(cachedRowObject);
+            // for (Object column : columns) {
+            // errorMsg += column.toString() + " ";
+            // }
+            // errorMsg += "\n";
+            // Log.info(errorMsg);
+            // System.out.println(errorMsg);
+            // // result = new BooleanWritable(true);
+            // throw new IllegalStateException(errorMsg);
+            // }
 
-			serializeResult(result);
-		} catch (HiveException e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e.getMessage());
-		} catch (IOException e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e.getMessage());
-		}
-	}
+            serializeResult(result);
+        } catch (HiveException e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+    }
 
-	/**
-	 * serialize the result
-	 * 
-	 * @param result
-	 *            the evaluation result
-	 * @throws IOException
-	 * @throws AlgebricksException
-	 */
-	private void serializeResult(Object result) throws IOException,
-			AlgebricksException {
-		if (lazySer == null)
-			lazySer = new LazySerDe();
+    /**
+     * serialize the result
+     * 
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result) throws IOException, AlgebricksException {
+        if (lazySer == null)
+            lazySer = new LazySerDe();
 
-		try {
-			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(
-					result, outputInspector);
-			dataOutput.write(outputWritable.getBytes(), 0,
-					outputWritable.getLength());
-		} catch (SerDeException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, outputInspector);
+            dataOutput.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	/**
-	 * bind the tuple reference to the cached row object
-	 * 
-	 * @param r
-	 */
-	private void readIntoCache(IFrameTupleReference r) {
-		if (cachedRowObject == null)
-			cachedRowObject = (LazyObject<? extends ObjectInspector>) LazyFactory
-					.createLazyObject(inspector);
-		cachedRowObject.init(r);
-	}
+    /**
+     * bind the tuple reference to the cached row object
+     * 
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        if (cachedRowObject == null)
+            cachedRowObject = (LazyObject<? extends ObjectInspector>) LazyFactory.createLazyObject(inspector);
+        cachedRowObject.init(r);
+    }
 
-	/**
-	 * set a list of children of this evaluator
-	 * 
-	 * @param children
-	 */
-	public void setChildren(List<ICopyEvaluator> children) {
-		this.children = children;
-	}
+    /**
+     * set a list of children of this evaluator
+     * 
+     * @param children
+     */
+    public void setChildren(List<ICopyEvaluator> children) {
+        this.children = children;
+    }
 
-	public void addChild(ICopyEvaluator child) {
-		if (children == null)
-			children = new ArrayList<ICopyEvaluator>();
-		children.add(child);
-	}
+    public void addChild(ICopyEvaluator child) {
+        if (children == null)
+            children = new ArrayList<ICopyEvaluator>();
+        children.add(child);
+    }
 
 }
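
A concrete evaluator only has to hand a Hive ExprNodeEvaluator to the constructor above; evaluate(...) then lazily initializes the output inspector, binds the frame tuple, and serializes the value through LazySerDe. A hypothetical subclass sketch, where the class name is an assumption and not taken from this commit:

    // Sketch: wrap a Hive column evaluator in the Algebricks ICopyEvaluator contract.
    public class ColumnExpressionEvaluator extends AbstractExpressionEvaluator {
        public ColumnExpressionEvaluator(ExprNodeColumnDesc expr, ObjectInspector oi,
                IDataOutputProvider output) throws AlgebricksException {
            super(new ExprNodeColumnEvaluator(expr), oi, output);
        }
    }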

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
index 271b5e4..e500376 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
@@ -24,208 +24,201 @@
 
 public class AggregationFunctionEvaluator implements ICopyAggregateFunction {
 
-	/**
-	 * the mode of aggregation function
-	 */
-	private GenericUDAFEvaluator.Mode mode;
+    /**
+     * the mode of aggregation function
+     */
+    private GenericUDAFEvaluator.Mode mode;
 
-	/**
-	 * an array of evaluators
-	 */
-	private ExprNodeEvaluator[] evaluators;
+    /**
+     * an array of evaluators
+     */
+    private ExprNodeEvaluator[] evaluators;
 
-	/**
-	 * udaf evaluator partial
-	 */
-	private GenericUDAFEvaluator udafPartial;
+    /**
+     * udaf evaluator partial
+     */
+    private GenericUDAFEvaluator udafPartial;
 
-	/**
-	 * udaf evaluator complete
-	 */
-	private GenericUDAFEvaluator udafComplete;
+    /**
+     * udaf evaluator complete
+     */
+    private GenericUDAFEvaluator udafComplete;
 
-	/**
-	 * cached parameter objects
-	 */
-	private Object[] cachedParameters;
+    /**
+     * cached parameter objects
+     */
+    private Object[] cachedParameters;
 
-	/**
-	 * cached row objects
-	 */
-	private LazyObject<? extends ObjectInspector> cachedRowObject;
+    /**
+     * cached row objects
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
 
-	/**
-	 * the output channel
-	 */
-	private DataOutput out;
+    /**
+     * the output channel
+     */
+    private DataOutput out;
 
-	/**
-	 * aggregation buffer
-	 */
-	private AggregationBuffer aggBuffer;
+    /**
+     * aggregation buffer
+     */
+    private AggregationBuffer aggBuffer;
 
-	/**
-	 * we only use lazy serde to do serialization
-	 */
-	private SerDe lazySer;
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private SerDe lazySer;
 
-	/**
-	 * the output object inspector for this aggregation function
-	 */
-	private ObjectInspector outputInspector;
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspector;
 
-	/**
-	 * the output object inspector for this aggregation function
-	 */
-	private ObjectInspector outputInspectorPartial;
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspectorPartial;
 
-	/**
-	 * parameter inspectors
-	 */
-	private ObjectInspector[] parameterInspectors;
+    /**
+     * parameter inspectors
+     */
+    private ObjectInspector[] parameterInspectors;
 
-	/**
-	 * output make sure the aggregation functio has least object creation
-	 * 
-	 * @param desc
-	 * @param oi
-	 * @param output
-	 */
-	public AggregationFunctionEvaluator(List<ExprNodeDesc> inputs,
-			List<TypeInfo> inputTypes, String genericUDAFName,
-			GenericUDAFEvaluator.Mode aggMode, boolean distinct,
-			ObjectInspector oi, DataOutput output, ExprNodeEvaluator[] evals,
-			ObjectInspector[] pInspectors, Object[] parameterCache,
-			SerDe serde, LazyObject<? extends ObjectInspector> row,
-			GenericUDAFEvaluator udafunctionPartial,
-			GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi,
-			ObjectInspector outputOiPartial) {
-		// shared object across threads
-		this.out = output;
-		this.mode = aggMode;
-		this.parameterInspectors = pInspectors;
+    /**
+     * make sure the aggregation function has the least object creation
+     * 
+     * @param desc
+     * @param oi
+     * @param output
+     */
+    public AggregationFunctionEvaluator(List<ExprNodeDesc> inputs, List<TypeInfo> inputTypes, String genericUDAFName,
+            GenericUDAFEvaluator.Mode aggMode, boolean distinct, ObjectInspector oi, DataOutput output,
+            ExprNodeEvaluator[] evals, ObjectInspector[] pInspectors, Object[] parameterCache, SerDe serde,
+            LazyObject<? extends ObjectInspector> row, GenericUDAFEvaluator udafunctionPartial,
+            GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi, ObjectInspector outputOiPartial) {
+        // shared object across threads
+        this.out = output;
+        this.mode = aggMode;
+        this.parameterInspectors = pInspectors;
 
-		// thread local objects
-		this.evaluators = evals;
-		this.cachedParameters = parameterCache;
-		this.cachedRowObject = row;
-		this.lazySer = serde;
-		this.udafPartial = udafunctionPartial;
-		this.udafComplete = udafunctionComplete;
-		this.outputInspector = outputOi;
-		this.outputInspectorPartial = outputOiPartial;
-	}
+        // thread local objects
+        this.evaluators = evals;
+        this.cachedParameters = parameterCache;
+        this.cachedRowObject = row;
+        this.lazySer = serde;
+        this.udafPartial = udafunctionPartial;
+        this.udafComplete = udafunctionComplete;
+        this.outputInspector = outputOi;
+        this.outputInspectorPartial = outputOiPartial;
+    }
 
-	@Override
-	public void init() throws AlgebricksException {
-		try {
-			aggBuffer = udafPartial.getNewAggregationBuffer();
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void init() throws AlgebricksException {
+        try {
+            aggBuffer = udafPartial.getNewAggregationBuffer();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	@Override
-	public void step(IFrameTupleReference tuple) throws AlgebricksException {
-		readIntoCache(tuple);
-		processRow();
-	}
+    @Override
+    public void step(IFrameTupleReference tuple) throws AlgebricksException {
+        readIntoCache(tuple);
+        processRow();
+    }
 
-	private void processRow() throws AlgebricksException {
-		try {
-			// get values by evaluating them
-			for (int i = 0; i < cachedParameters.length; i++) {
-				cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
-			}
-			processAggregate();
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    private void processRow() throws AlgebricksException {
+        try {
+            // get values by evaluating them
+            for (int i = 0; i < cachedParameters.length; i++) {
+                cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
+            }
+            processAggregate();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	private void processAggregate() throws HiveException {
-		/**
-		 * accumulate the aggregation function
-		 */
-		switch (mode) {
-		case PARTIAL1:
-		case COMPLETE:
-			udafPartial.iterate(aggBuffer, cachedParameters);
-			break;
-		case PARTIAL2:
-		case FINAL:
-			if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
-				Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
-						.getPrimitiveWritableObject(cachedParameters[0]);
-				udafPartial.merge(aggBuffer, parameter);
-			} else
-				udafPartial.merge(aggBuffer, cachedParameters[0]);
-			break;
-		default:
-			break;
-		}
-	}
+    private void processAggregate() throws HiveException {
+        /**
+         * accumulate the aggregation function
+         */
+        switch (mode) {
+            case PARTIAL1:
+            case COMPLETE:
+                udafPartial.iterate(aggBuffer, cachedParameters);
+                break;
+            case PARTIAL2:
+            case FINAL:
+                if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
+                    Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
+                            .getPrimitiveWritableObject(cachedParameters[0]);
+                    udafPartial.merge(aggBuffer, parameter);
+                } else
+                    udafPartial.merge(aggBuffer, cachedParameters[0]);
+                break;
+            default:
+                break;
+        }
+    }
 
-	/**
-	 * serialize the result
-	 * 
-	 * @param result
-	 *            the evaluation result
-	 * @throws IOException
-	 * @throws AlgebricksException
-	 */
-	private void serializeResult(Object result, ObjectInspector oi)
-			throws IOException, AlgebricksException {
-		try {
-			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(
-					result, oi);
-			out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
-		} catch (SerDeException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    /**
+     * serialize the result
+     * 
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result, ObjectInspector oi) throws IOException, AlgebricksException {
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, oi);
+            out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	/**
-	 * bind the tuple reference to the cached row object
-	 * 
-	 * @param r
-	 */
-	private void readIntoCache(IFrameTupleReference r) {
-		cachedRowObject.init(r);
-	}
+    /**
+     * bind the tuple reference to the cached row object
+     * 
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        cachedRowObject.init(r);
+    }
 
-	@Override
-	public void finish() throws AlgebricksException {
-		// aggregator
-		try {
-			Object result = null;
-			result = udafPartial.terminatePartial(aggBuffer);
-			if (mode == GenericUDAFEvaluator.Mode.COMPLETE
-					|| mode == GenericUDAFEvaluator.Mode.FINAL) {
-				result = udafComplete.terminate(aggBuffer);
-				serializeResult(result, outputInspector);
-			} else {
-				serializeResult(result, outputInspectorPartial);
-			}
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		} catch (IOException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void finish() throws AlgebricksException {
+        // aggregator
+        try {
+            Object result = null;
+            result = udafPartial.terminatePartial(aggBuffer);
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE || mode == GenericUDAFEvaluator.Mode.FINAL) {
+                result = udafComplete.terminate(aggBuffer);
+                serializeResult(result, outputInspector);
+            } else {
+                serializeResult(result, outputInspectorPartial);
+            }
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	@Override
-	public void finishPartial() throws AlgebricksException {
-		// aggregator.
-		try {
-			Object result = null;
-			// get aggregations
-			result = udafPartial.terminatePartial(aggBuffer);
-			serializeResult(result, outputInspectorPartial);
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		} catch (IOException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void finishPartial() throws AlgebricksException {
+        // aggregator.
+        try {
+            Object result = null;
+            // get aggregations
+            result = udafPartial.terminatePartial(aggBuffer);
+            serializeResult(result, outputInspectorPartial);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 }
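
To summarize the mode handling implemented above (a reading of the code, not new behavior):

    // PARTIAL1, COMPLETE: raw input rows    -> udafPartial.iterate(aggBuffer, cachedParameters)
    // PARTIAL2, FINAL:    partial aggregates -> udafPartial.merge(aggBuffer, ...)
    // finish():   COMPLETE/FINAL serialize udafComplete.terminate(...) with outputInspector;
    //             otherwise udafPartial.terminatePartial(...) with outputInspectorPartial.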

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
index 032437b..1933253 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
@@ -21,239 +21,228 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 
-public class AggregatuibFunctionSerializableEvaluator implements
-		ICopySerializableAggregateFunction {
+public class AggregatuibFunctionSerializableEvaluator implements ICopySerializableAggregateFunction {
 
-	/**
-	 * the mode of aggregation function
-	 */
-	private GenericUDAFEvaluator.Mode mode;
+    /**
+     * the mode of aggregation function
+     */
+    private GenericUDAFEvaluator.Mode mode;
 
-	/**
-	 * an array of evaluators
-	 */
-	private ExprNodeEvaluator[] evaluators;
+    /**
+     * an array of evaluators
+     */
+    private ExprNodeEvaluator[] evaluators;
 
-	/**
-	 * udaf evaluator partial
-	 */
-	private GenericUDAFEvaluator udafPartial;
+    /**
+     * udaf evaluator partial
+     */
+    private GenericUDAFEvaluator udafPartial;
 
-	/**
-	 * udaf evaluator complete
-	 */
-	private GenericUDAFEvaluator udafComplete;
+    /**
+     * udaf evaluator complete
+     */
+    private GenericUDAFEvaluator udafComplete;
 
-	/**
-	 * cached parameter objects
-	 */
-	private Object[] cachedParameters;
+    /**
+     * cached parameter objects
+     */
+    private Object[] cachedParameters;
 
-	/**
-	 * cached row objects
-	 */
-	private LazyObject<? extends ObjectInspector> cachedRowObject;
+    /**
+     * cached row objects
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
 
-	/**
-	 * aggregation buffer
-	 */
-	private SerializableBuffer aggBuffer;
+    /**
+     * aggregation buffer
+     */
+    private SerializableBuffer aggBuffer;
 
-	/**
-	 * we only use lazy serde to do serialization
-	 */
-	private SerDe lazySer;
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private SerDe lazySer;
 
-	/**
-	 * the output object inspector for this aggregation function
-	 */
-	private ObjectInspector outputInspector;
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspector;
 
-	/**
-	 * the output object inspector for this aggregation function
-	 */
-	private ObjectInspector outputInspectorPartial;
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspectorPartial;
 
-	/**
-	 * parameter inspectors
-	 */
-	private ObjectInspector[] parameterInspectors;
+    /**
+     * parameter inspectors
+     */
+    private ObjectInspector[] parameterInspectors;
 
-	/**
-	 * output make sure the aggregation functio has least object creation
-	 * 
-	 * @param desc
-	 * @param oi
-	 * @param output
-	 */
-	public AggregatuibFunctionSerializableEvaluator(List<ExprNodeDesc> inputs,
-			List<TypeInfo> inputTypes, String genericUDAFName,
-			GenericUDAFEvaluator.Mode aggMode, boolean distinct,
-			ObjectInspector oi, ExprNodeEvaluator[] evals,
-			ObjectInspector[] pInspectors, Object[] parameterCache,
-			SerDe serde, LazyObject<? extends ObjectInspector> row,
-			GenericUDAFEvaluator udafunctionPartial,
-			GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi,
-			ObjectInspector outputOiPartial) throws AlgebricksException {
-		// shared object across threads
-		this.mode = aggMode;
-		this.parameterInspectors = pInspectors;
+    /**
+     * make sure the aggregation function creates as few objects as possible
+     */
+    public AggregatuibFunctionSerializableEvaluator(List<ExprNodeDesc> inputs, List<TypeInfo> inputTypes,
+            String genericUDAFName, GenericUDAFEvaluator.Mode aggMode, boolean distinct, ObjectInspector oi,
+            ExprNodeEvaluator[] evals, ObjectInspector[] pInspectors, Object[] parameterCache, SerDe serde,
+            LazyObject<? extends ObjectInspector> row, GenericUDAFEvaluator udafunctionPartial,
+            GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi, ObjectInspector outputOiPartial)
+            throws AlgebricksException {
+        // shared object across threads
+        this.mode = aggMode;
+        this.parameterInspectors = pInspectors;
 
-		// thread local objects
-		this.evaluators = evals;
-		this.cachedParameters = parameterCache;
-		this.cachedRowObject = row;
-		this.lazySer = serde;
-		this.udafPartial = udafunctionPartial;
-		this.udafComplete = udafunctionComplete;
-		this.outputInspector = outputOi;
-		this.outputInspectorPartial = outputOiPartial;
+        // thread local objects
+        this.evaluators = evals;
+        this.cachedParameters = parameterCache;
+        this.cachedRowObject = row;
+        this.lazySer = serde;
+        this.udafPartial = udafunctionPartial;
+        this.udafComplete = udafunctionComplete;
+        this.outputInspector = outputOi;
+        this.outputInspectorPartial = outputOiPartial;
 
-		try {
-			aggBuffer = (SerializableBuffer) udafPartial
-					.getNewAggregationBuffer();
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+        try {
+            aggBuffer = (SerializableBuffer) udafPartial.getNewAggregationBuffer();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	@Override
-	public void init(DataOutput output) throws AlgebricksException {
-		try {
-			udafPartial.reset(aggBuffer);
-			outputAggBuffer(aggBuffer, output);
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void init(DataOutput output) throws AlgebricksException {
+        try {
+            udafPartial.reset(aggBuffer);
+            outputAggBuffer(aggBuffer, output);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	@Override
-	public void step(IFrameTupleReference tuple, byte[] data, int start, int len)
-			throws AlgebricksException {
-		deSerializeAggBuffer(aggBuffer, data, start, len);
-		readIntoCache(tuple);
-		processRow();
-		serializeAggBuffer(aggBuffer, data, start, len);
-	}
+    @Override
+    public void step(IFrameTupleReference tuple, byte[] data, int start, int len) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        readIntoCache(tuple);
+        processRow();
+        serializeAggBuffer(aggBuffer, data, start, len);
+    }
 
-	private void processRow() throws AlgebricksException {
-		try {
-			// get values by evaluating them
-			for (int i = 0; i < cachedParameters.length; i++) {
-				cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
-			}
-			processAggregate();
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    private void processRow() throws AlgebricksException {
+        try {
+            // get values by evaluating them
+            for (int i = 0; i < cachedParameters.length; i++) {
+                cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
+            }
+            processAggregate();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	private void processAggregate() throws HiveException {
-		/**
-		 * accumulate the aggregation function
-		 */
-		switch (mode) {
-		case PARTIAL1:
-		case COMPLETE:
-			udafPartial.iterate(aggBuffer, cachedParameters);
-			break;
-		case PARTIAL2:
-		case FINAL:
-			if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
-				Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
-						.getPrimitiveWritableObject(cachedParameters[0]);
-				udafPartial.merge(aggBuffer, parameter);
-			} else
-				udafPartial.merge(aggBuffer, cachedParameters[0]);
-			break;
-		default:
-			break;
-		}
-	}
+    private void processAggregate() throws HiveException {
+        /**
+         * accumulate the aggregation function
+         */
+        switch (mode) {
+            case PARTIAL1:
+            case COMPLETE:
+                udafPartial.iterate(aggBuffer, cachedParameters);
+                break;
+            case PARTIAL2:
+            case FINAL:
+                if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
+                    Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
+                            .getPrimitiveWritableObject(cachedParameters[0]);
+                    udafPartial.merge(aggBuffer, parameter);
+                } else
+                    udafPartial.merge(aggBuffer, cachedParameters[0]);
+                break;
+            default:
+                break;
+        }
+    }
 
-	/**
-	 * serialize the result
-	 * 
-	 * @param result
-	 *            the evaluation result
-	 * @throws IOException
-	 * @throws AlgebricksException
-	 */
-	private void serializeResult(Object result, ObjectInspector oi,
-			DataOutput out) throws IOException, AlgebricksException {
-		try {
-			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(
-					result, oi);
-			out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
-		} catch (SerDeException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    /**
+     * serialize the result
+     * 
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result, ObjectInspector oi, DataOutput out) throws IOException,
+            AlgebricksException {
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, oi);
+            out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	/**
-	 * bind the tuple reference to the cached row object
-	 * 
-	 * @param r
-	 */
-	private void readIntoCache(IFrameTupleReference r) {
-		cachedRowObject.init(r);
-	}
+    /**
+     * bind the tuple reference to the cached row object
+     * 
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        cachedRowObject.init(r);
+    }
 
-	@Override
-	public void finish(byte[] data, int start, int len, DataOutput output)
-			throws AlgebricksException {
-		deSerializeAggBuffer(aggBuffer, data, start, len);
-		// aggregator
-		try {
-			Object result = null;
-			result = udafPartial.terminatePartial(aggBuffer);
-			if (mode == GenericUDAFEvaluator.Mode.COMPLETE
-					|| mode == GenericUDAFEvaluator.Mode.FINAL) {
-				result = udafComplete.terminate(aggBuffer);
-				serializeResult(result, outputInspector, output);
-			} else {
-				serializeResult(result, outputInspectorPartial, output);
-			}
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		} catch (IOException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void finish(byte[] data, int start, int len, DataOutput output) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        // aggregator
+        try {
+            Object result = null;
+            result = udafPartial.terminatePartial(aggBuffer);
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE || mode == GenericUDAFEvaluator.Mode.FINAL) {
+                result = udafComplete.terminate(aggBuffer);
+                serializeResult(result, outputInspector, output);
+            } else {
+                serializeResult(result, outputInspectorPartial, output);
+            }
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	@Override
-	public void finishPartial(byte[] data, int start, int len, DataOutput output)
-			throws AlgebricksException {
-		deSerializeAggBuffer(aggBuffer, data, start, len);
-		// aggregator.
-		try {
-			Object result = null;
-			// get aggregations
-			result = udafPartial.terminatePartial(aggBuffer);
-			serializeResult(result, outputInspectorPartial, output);
-		} catch (HiveException e) {
-			throw new AlgebricksException(e);
-		} catch (IOException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    @Override
+    public void finishPartial(byte[] data, int start, int len, DataOutput output) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        // aggregator.
+        try {
+            Object result = null;
+            // get aggregations
+            result = udafPartial.terminatePartial(aggBuffer);
+            serializeResult(result, outputInspectorPartial, output);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 
-	private void serializeAggBuffer(SerializableBuffer buffer, byte[] data,
-			int start, int len) throws AlgebricksException {
-		buffer.serializeAggBuffer(data, start, len);
-	}
+    private void serializeAggBuffer(SerializableBuffer buffer, byte[] data, int start, int len)
+            throws AlgebricksException {
+        buffer.serializeAggBuffer(data, start, len);
+    }
 
-	private void deSerializeAggBuffer(SerializableBuffer buffer, byte[] data,
-			int start, int len) throws AlgebricksException {
-		buffer.deSerializeAggBuffer(data, start, len);
-	}
+    private void deSerializeAggBuffer(SerializableBuffer buffer, byte[] data, int start, int len)
+            throws AlgebricksException {
+        buffer.deSerializeAggBuffer(data, start, len);
+    }
 
-	private void outputAggBuffer(SerializableBuffer buffer, DataOutput out)
-			throws AlgebricksException {
-		try {
-			buffer.serializeAggBuffer(out);
-		} catch (IOException e) {
-			throw new AlgebricksException(e);
-		}
-	}
+    private void outputAggBuffer(SerializableBuffer buffer, DataOutput out) throws AlgebricksException {
+        try {
+            buffer.serializeAggBuffer(out);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
 }
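
Unlike the evaluator before it, this serializable variant keeps no aggregation state on the heap between calls: each step() deserializes the buffer from the frame's byte array, updates it, and writes it back in place. A stripped-down sketch of that round-trip, assuming an initialized evaluator (the real step() also dispatches on the aggregation mode, as in processAggregate()):

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;

public class FrameStateSketch {
    // one aggregation step: read the state out of the frame, update it
    // through the UDAF evaluator, and write it back in place
    static void step(SerializableBuffer buffer, GenericUDAFEvaluator evaluator, Object[] row,
            byte[] frame, int start, int len) throws HiveException {
        buffer.deSerializeAggBuffer(frame, start, len);
        evaluator.iterate(buffer, row);
        buffer.serializeAggBuffer(frame, start, len);
    }
}
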
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
index d73be93..96065e5 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
@@ -2,72 +2,66 @@
 
 public class BufferSerDeUtil {
 
-	public static double getDouble(byte[] bytes, int offset) {
-		return Double.longBitsToDouble(getLong(bytes, offset));
-	}
+    public static double getDouble(byte[] bytes, int offset) {
+        return Double.longBitsToDouble(getLong(bytes, offset));
+    }
 
-	public static float getFloat(byte[] bytes, int offset) {
-		return Float.intBitsToFloat(getInt(bytes, offset));
-	}
+    public static float getFloat(byte[] bytes, int offset) {
+        return Float.intBitsToFloat(getInt(bytes, offset));
+    }
 
-	public static boolean getBoolean(byte[] bytes, int offset) {
-		if (bytes[offset] == 0)
-			return false;
-		else
-			return true;
-	}
+    public static boolean getBoolean(byte[] bytes, int offset) {
+        if (bytes[offset] == 0)
+            return false;
+        else
+            return true;
+    }
 
-	public static int getInt(byte[] bytes, int offset) {
-		return ((bytes[offset] & 0xff) << 24)
-				+ ((bytes[offset + 1] & 0xff) << 16)
-				+ ((bytes[offset + 2] & 0xff) << 8)
-				+ ((bytes[offset + 3] & 0xff) << 0);
-	}
+    public static int getInt(byte[] bytes, int offset) {
+        return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
+                + ((bytes[offset + 3] & 0xff) << 0);
+    }
 
-	public static long getLong(byte[] bytes, int offset) {
-		return (((long) (bytes[offset] & 0xff)) << 56)
-				+ (((long) (bytes[offset + 1] & 0xff)) << 48)
-				+ (((long) (bytes[offset + 2] & 0xff)) << 40)
-				+ (((long) (bytes[offset + 3] & 0xff)) << 32)
-				+ (((long) (bytes[offset + 4] & 0xff)) << 24)
-				+ (((long) (bytes[offset + 5] & 0xff)) << 16)
-				+ (((long) (bytes[offset + 6] & 0xff)) << 8)
-				+ (((long) (bytes[offset + 7] & 0xff)) << 0);
-	}
+    public static long getLong(byte[] bytes, int offset) {
+        return (((long) (bytes[offset] & 0xff)) << 56) + (((long) (bytes[offset + 1] & 0xff)) << 48)
+                + (((long) (bytes[offset + 2] & 0xff)) << 40) + (((long) (bytes[offset + 3] & 0xff)) << 32)
+                + (((long) (bytes[offset + 4] & 0xff)) << 24) + (((long) (bytes[offset + 5] & 0xff)) << 16)
+                + (((long) (bytes[offset + 6] & 0xff)) << 8) + (((long) (bytes[offset + 7] & 0xff)) << 0);
+    }
 
-	public static void writeBoolean(boolean value, byte[] bytes, int offset) {
-		if (value)
-			bytes[offset] = (byte) 1;
-		else
-			bytes[offset] = (byte) 0;
-	}
+    public static void writeBoolean(boolean value, byte[] bytes, int offset) {
+        if (value)
+            bytes[offset] = (byte) 1;
+        else
+            bytes[offset] = (byte) 0;
+    }
 
-	public static void writeInt(int value, byte[] bytes, int offset) {
-		bytes[offset++] = (byte) (value >> 24);
-		bytes[offset++] = (byte) (value >> 16);
-		bytes[offset++] = (byte) (value >> 8);
-		bytes[offset++] = (byte) (value);
-	}
+    public static void writeInt(int value, byte[] bytes, int offset) {
+        bytes[offset++] = (byte) (value >> 24);
+        bytes[offset++] = (byte) (value >> 16);
+        bytes[offset++] = (byte) (value >> 8);
+        bytes[offset++] = (byte) (value);
+    }
 
-	public static void writeLong(long value, byte[] bytes, int offset) {
-		bytes[offset++] = (byte) (value >> 56);
-		bytes[offset++] = (byte) (value >> 48);
-		bytes[offset++] = (byte) (value >> 40);
-		bytes[offset++] = (byte) (value >> 32);
-		bytes[offset++] = (byte) (value >> 24);
-		bytes[offset++] = (byte) (value >> 16);
-		bytes[offset++] = (byte) (value >> 8);
-		bytes[offset++] = (byte) (value);
-	}
+    public static void writeLong(long value, byte[] bytes, int offset) {
+        bytes[offset++] = (byte) (value >> 56);
+        bytes[offset++] = (byte) (value >> 48);
+        bytes[offset++] = (byte) (value >> 40);
+        bytes[offset++] = (byte) (value >> 32);
+        bytes[offset++] = (byte) (value >> 24);
+        bytes[offset++] = (byte) (value >> 16);
+        bytes[offset++] = (byte) (value >> 8);
+        bytes[offset++] = (byte) (value);
+    }
 
-	public static void writeDouble(double value, byte[] bytes, int offset) {
-		long lValue = Double.doubleToLongBits(value);
-		writeLong(lValue, bytes, offset);
-	}
+    public static void writeDouble(double value, byte[] bytes, int offset) {
+        long lValue = Double.doubleToLongBits(value);
+        writeLong(lValue, bytes, offset);
+    }
 
-	public static void writeFloat(float value, byte[] bytes, int offset) {
-		int iValue = Float.floatToIntBits(value);
-		writeInt(iValue, bytes, offset);
-	}
+    public static void writeFloat(float value, byte[] bytes, int offset) {
+        int iValue = Float.floatToIntBits(value);
+        writeInt(iValue, bytes, offset);
+    }
 
 }
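
All of these helpers use big-endian byte order, the same layout java.io.DataOutput produces, so the byte[] and DataOutput serialization paths stay interchangeable. A quick round-trip check (buffer size and values are arbitrary):

import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;

public class BufferSerDeUtilCheck {
    public static void main(String[] args) {
        byte[] buf = new byte[12];
        BufferSerDeUtil.writeDouble(3.25, buf, 0); // doubles take 8 bytes
        BufferSerDeUtil.writeInt(42, buf, 8);      // ints take 4 bytes
        System.out.println(BufferSerDeUtil.getDouble(buf, 0)); // 3.25
        System.out.println(BufferSerDeUtil.getInt(buf, 8));    // 42
    }
}
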
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
index 3296e19..5647f6a 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
@@ -9,10 +9,9 @@
 

 public class ColumnExpressionEvaluator extends AbstractExpressionEvaluator {

 

-	public ColumnExpressionEvaluator(ExprNodeColumnDesc expr,

-			ObjectInspector oi, IDataOutputProvider output)

-			throws AlgebricksException {

-		super(new ExprNodeColumnEvaluator(expr), oi, output);

-	}

+    public ColumnExpressionEvaluator(ExprNodeColumnDesc expr, ObjectInspector oi, IDataOutputProvider output)

+            throws AlgebricksException {

+        super(new ExprNodeColumnEvaluator(expr), oi, output);

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
index 62928e6..d8796ea 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
@@ -9,9 +9,8 @@
 

 public class ConstantExpressionEvaluator extends AbstractExpressionEvaluator {

 

-	public ConstantExpressionEvaluator(ExprNodeConstantDesc expr,

-			ObjectInspector oi, IDataOutputProvider output)

-			throws AlgebricksException {

-		super(new ExprNodeConstantEvaluator(expr), oi, output);

-	}

+    public ConstantExpressionEvaluator(ExprNodeConstantDesc expr, ObjectInspector oi, IDataOutputProvider output)

+            throws AlgebricksException {

+        super(new ExprNodeConstantEvaluator(expr), oi, output);

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java
index 2180910..c40ef73 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java
@@ -41,193 +41,173 @@
 

 public class ExpressionTranslator {

 

-	public static Object getHiveExpression(ILogicalExpression expr,

-			IVariableTypeEnvironment env) throws Exception {

-		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {

-			/**

-			 * function expression

-			 */

-			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;

-			IFunctionInfo funcInfo = funcExpr.getFunctionInfo();

-			FunctionIdentifier fid = funcInfo.getFunctionIdentifier();

+    public static Object getHiveExpression(ILogicalExpression expr, IVariableTypeEnvironment env) throws Exception {

+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {

+            /**

+             * function expression

+             */

+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;

+            IFunctionInfo funcInfo = funcExpr.getFunctionInfo();

+            FunctionIdentifier fid = funcInfo.getFunctionIdentifier();

 

-			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {

-				Object info = ((HiveFunctionInfo) funcInfo).getInfo();

-				ExprNodeFieldDesc desc = (ExprNodeFieldDesc) info;

-				return new ExprNodeFieldDesc(desc.getTypeInfo(),

-						desc.getDesc(), desc.getFieldName(), desc.getIsList());

-			}

+            if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {

+                Object info = ((HiveFunctionInfo) funcInfo).getInfo();

+                ExprNodeFieldDesc desc = (ExprNodeFieldDesc) info;

+                return new ExprNodeFieldDesc(desc.getTypeInfo(), desc.getDesc(), desc.getFieldName(), desc.getIsList());

+            }

 

-			if (fid.getName().equals(ExpressionConstant.NULL)) {

-				return new ExprNodeNullDesc();

-			}

+            if (fid.getName().equals(ExpressionConstant.NULL)) {

+                return new ExprNodeNullDesc();

+            }

 

-			/**

-			 * argument expressions: translate argument expressions recursively

-			 * first, this logic is shared in scalar, aggregation and unnesting

-			 * function

-			 */

-			List<Mutable<ILogicalExpression>> arguments = funcExpr

-					.getArguments();

-			List<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();

-			for (Mutable<ILogicalExpression> argument : arguments) {

-				/**

-				 * parameters could not be aggregate function desc

-				 */

-				ExprNodeDesc parameter = (ExprNodeDesc) getHiveExpression(

-						argument.getValue(), env);

-				parameters.add(parameter);

-			}

+            /**

+             * argument expressions: translate the argument expressions

+             * recursively first; this logic is shared by scalar, aggregation,

+             * and unnesting functions

+             */

+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();

+            List<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();

+            for (Mutable<ILogicalExpression> argument : arguments) {

+                /**

+                 * parameters cannot be aggregate function descriptors

+                 */

+                ExprNodeDesc parameter = (ExprNodeDesc) getHiveExpression(argument.getValue(), env);

+                parameters.add(parameter);

+            }

 

-			/**

-			 * get expression

-			 */

-			if (funcExpr instanceof ScalarFunctionCallExpression) {

-				String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE

-						.getHiveFunctionName(fid);

-				GenericUDF udf;

-				if (udfName != null) {

-					/**

-					 * get corresponding function info for built-in functions

-					 */

-					FunctionInfo fInfo = FunctionRegistry

-							.getFunctionInfo(udfName);

-					udf = fInfo.getGenericUDF();

+            /**

+             * get expression

+             */

+            if (funcExpr instanceof ScalarFunctionCallExpression) {

+                String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getHiveFunctionName(fid);

+                GenericUDF udf;

+                if (udfName != null) {

+                    /**

+                     * get corresponding function info for built-in functions

+                     */

+                    FunctionInfo fInfo = FunctionRegistry.getFunctionInfo(udfName);

+                    udf = fInfo.getGenericUDF();

 

-					int inputSize = parameters.size();

-					List<ExprNodeDesc> currentDescs = new ArrayList<ExprNodeDesc>();

+                    int inputSize = parameters.size();

+                    List<ExprNodeDesc> currentDescs = new ArrayList<ExprNodeDesc>();

 

-					// generate expression tree if necessary

-					while (inputSize > 2) {

-						int pairs = inputSize / 2;

-						for (int i = 0; i < pairs; i++) {

-							List<ExprNodeDesc> descs = new ArrayList<ExprNodeDesc>();

-							descs.add(parameters.get(2 * i));

-							descs.add(parameters.get(2 * i + 1));

-							ExprNodeDesc desc = ExprNodeGenericFuncDesc

-									.newInstance(udf, descs);

-							currentDescs.add(desc);

-						}

+                    // generate expression tree if necessary

+                    while (inputSize > 2) {

+                        int pairs = inputSize / 2;

+                        for (int i = 0; i < pairs; i++) {

+                            List<ExprNodeDesc> descs = new ArrayList<ExprNodeDesc>();

+                            descs.add(parameters.get(2 * i));

+                            descs.add(parameters.get(2 * i + 1));

+                            ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf, descs);

+                            currentDescs.add(desc);

+                        }

 

-						if (inputSize % 2 != 0) {

-							// List<ExprNodeDesc> descs = new

-							// ArrayList<ExprNodeDesc>();

-							// ExprNodeDesc lastExpr =

-							// currentDescs.remove(currentDescs.size() - 1);

-							// descs.add(lastExpr);

-							currentDescs.add(parameters.get(inputSize - 1));

-							// ExprNodeDesc desc =

-							// ExprNodeGenericFuncDesc.newInstance(udf, descs);

-							// currentDescs.add(desc);

-						}

-						inputSize = currentDescs.size();

-						parameters.clear();

-						parameters.addAll(currentDescs);

-						currentDescs.clear();

-					}

+                        if (inputSize % 2 != 0) {

+                            // List<ExprNodeDesc> descs = new

+                            // ArrayList<ExprNodeDesc>();

+                            // ExprNodeDesc lastExpr =

+                            // currentDescs.remove(currentDescs.size() - 1);

+                            // descs.add(lastExpr);

+                            currentDescs.add(parameters.get(inputSize - 1));

+                            // ExprNodeDesc desc =

+                            // ExprNodeGenericFuncDesc.newInstance(udf, descs);

+                            // currentDescs.add(desc);

+                        }

+                        inputSize = currentDescs.size();

+                        parameters.clear();

+                        parameters.addAll(currentDescs);

+                        currentDescs.clear();

+                    }

 

-				} else {

-					Object secondInfo = ((HiveFunctionInfo) funcInfo).getInfo();

-					if (secondInfo != null) {

+                } else {

+                    Object secondInfo = ((HiveFunctionInfo) funcInfo).getInfo();

+                    if (secondInfo != null) {

 

-						/**

-						 * for GenericUDFBridge: we should not call get type of

-						 * this hive expression, because parameters may have

-						 * been changed!

-						 */

-						ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) ((HiveFunctionInfo) funcInfo)

-								.getInfo();

-						udf = hiveExpr.getGenericUDF();

-					} else {

-						/**

-						 * for other generic UDF

-						 */

-						Class<?> udfClass;

-						try {

-							udfClass = Class.forName(fid.getName());

-							udf = (GenericUDF) udfClass.newInstance();

-						} catch (Exception e) {

-							e.printStackTrace();

-							throw new AlgebricksException(e.getMessage());

-						}

-					}

-				}

-				/**

-				 * get hive generic function expression

-				 */

-				ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf,

-						parameters);

-				return desc;

-			} else if (funcExpr instanceof AggregateFunctionCallExpression) {

-				/**

-				 * hive aggregation info

-				 */

-				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr

-						.getFunctionInfo()).getInfo();

-				/**

-				 * set parameters

-				 */

-				aggregateDesc

-						.setParameters((ArrayList<ExprNodeDesc>) parameters);

+                        /**

+                         * for GenericUDFBridge: we should not get the type of

+                         * this hive expression here, because its parameters

+                         * may have been changed

+                         */

+                        ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) ((HiveFunctionInfo) funcInfo)

+                                .getInfo();

+                        udf = hiveExpr.getGenericUDF();

+                    } else {

+                        /**

+                         * for other generic UDF

+                         */

+                        Class<?> udfClass;

+                        try {

+                            udfClass = Class.forName(fid.getName());

+                            udf = (GenericUDF) udfClass.newInstance();

+                        } catch (Exception e) {

+                            e.printStackTrace();

+                            throw new AlgebricksException(e.getMessage());

+                        }

+                    }

+                }

+                /**

+                 * get hive generic function expression

+                 */

+                ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf, parameters);

+                return desc;

+            } else if (funcExpr instanceof AggregateFunctionCallExpression) {

+                /**

+                 * hive aggregation info

+                 */

+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())

+                        .getInfo();

+                /**

+                 * set parameters

+                 */

+                aggregateDesc.setParameters((ArrayList<ExprNodeDesc>) parameters);

 

-				List<TypeInfo> originalParameterTypeInfos = new ArrayList<TypeInfo>();

-				for (ExprNodeDesc parameter : parameters) {

-					if (parameter.getTypeInfo() instanceof StructTypeInfo) {

-						originalParameterTypeInfos

-								.add(TypeInfoFactory.doubleTypeInfo);

-					} else

-						originalParameterTypeInfos.add(parameter.getTypeInfo());

-				}

+                List<TypeInfo> originalParameterTypeInfos = new ArrayList<TypeInfo>();

+                for (ExprNodeDesc parameter : parameters) {

+                    if (parameter.getTypeInfo() instanceof StructTypeInfo) {

+                        originalParameterTypeInfos.add(TypeInfoFactory.doubleTypeInfo);

+                    } else

+                        originalParameterTypeInfos.add(parameter.getTypeInfo());

+                }

 

-				GenericUDAFEvaluator eval = FunctionRegistry

-						.getGenericUDAFEvaluator(

-								aggregateDesc.getGenericUDAFName(),

-								originalParameterTypeInfos,

-								aggregateDesc.getDistinct(), false);

+                GenericUDAFEvaluator eval = FunctionRegistry.getGenericUDAFEvaluator(

+                        aggregateDesc.getGenericUDAFName(), originalParameterTypeInfos, aggregateDesc.getDistinct(),

+                        false);

 

-				AggregationDesc newAggregateDesc = new AggregationDesc(

-						aggregateDesc.getGenericUDAFName(), eval,

-						aggregateDesc.getParameters(),

-						aggregateDesc.getDistinct(), aggregateDesc.getMode());

-				return newAggregateDesc;

-			} else if (funcExpr instanceof UnnestingFunctionCallExpression) {

-				/**

-				 * type inference for UDTF function

-				 */

-				UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr

-						.getFunctionInfo()).getInfo();

-				String funcName = hiveDesc.getUDTFName();

-				FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);

-				GenericUDTF udtf = fi.getGenericUDTF();

-				UDTFDesc desc = new UDTFDesc(udtf);

-				return desc;

-			} else {

-				throw new IllegalStateException(

-						"unrecognized function expression "

-								+ expr.getClass().getName());

-			}

-		} else if ((expr.getExpressionTag() == LogicalExpressionTag.VARIABLE)) {

-			/**

-			 * get type for variable in the environment

-			 */

-			VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;

-			LogicalVariable var = varExpr.getVariableReference();

-			TypeInfo typeInfo = (TypeInfo) env.getVarType(var);

-			ExprNodeDesc desc = new ExprNodeColumnDesc(typeInfo,

-					var.toString(), "", false);

-			return desc;

-		} else if ((expr.getExpressionTag() == LogicalExpressionTag.CONSTANT)) {

-			/**

-			 * get expression for constant in the environment

-			 */

-			ConstantExpression varExpr = (ConstantExpression) expr;

-			Object value = ((HivesterixConstantValue) varExpr.getValue())

-					.getObject();

-			ExprNodeDesc desc = new ExprNodeConstantDesc(value);

-			return desc;

-		} else {

-			throw new IllegalStateException("illegal expressions "

-					+ expr.getClass().getName());

-		}

-	}

+                AggregationDesc newAggregateDesc = new AggregationDesc(aggregateDesc.getGenericUDAFName(), eval,

+                        aggregateDesc.getParameters(), aggregateDesc.getDistinct(), aggregateDesc.getMode());

+                return newAggregateDesc;

+            } else if (funcExpr instanceof UnnestingFunctionCallExpression) {

+                /**

+                 * type inference for UDTF function

+                 */

+                UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();

+                String funcName = hiveDesc.getUDTFName();

+                FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);

+                GenericUDTF udtf = fi.getGenericUDTF();

+                UDTFDesc desc = new UDTFDesc(udtf);

+                return desc;

+            } else {

+                throw new IllegalStateException("unrecognized function expression " + expr.getClass().getName());

+            }

+        } else if ((expr.getExpressionTag() == LogicalExpressionTag.VARIABLE)) {

+            /**

+             * get type for variable in the environment

+             */

+            VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;

+            LogicalVariable var = varExpr.getVariableReference();

+            TypeInfo typeInfo = (TypeInfo) env.getVarType(var);

+            ExprNodeDesc desc = new ExprNodeColumnDesc(typeInfo, var.toString(), "", false);

+            return desc;

+        } else if ((expr.getExpressionTag() == LogicalExpressionTag.CONSTANT)) {

+            /**

+             * get expression for constant in the environment

+             */

+            ConstantExpression varExpr = (ConstantExpression) expr;

+            Object value = ((HivesterixConstantValue) varExpr.getValue()).getObject();

+            ExprNodeDesc desc = new ExprNodeConstantDesc(value);

+            return desc;

+        } else {

+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());

+        }

+    }

 }
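
The while (inputSize > 2) loop above rewrites an n-ary call such as and(a, b, c, d) into nested binary calls, pairing arguments level by level and carrying an odd leftover up unchanged. The same pairing sketched on plain strings, so the resulting shape is easy to see:

import java.util.ArrayList;
import java.util.List;

public class PairwiseFoldSketch {
    // [a, b, c, d] -> [f(a,b), f(c,d)] -> f(f(a,b), f(c,d));
    // an odd leftover is promoted to the next level as-is
    static String fold(String f, List<String> args) {
        List<String> current = new ArrayList<String>(args);
        while (current.size() > 2) {
            List<String> next = new ArrayList<String>();
            for (int i = 0; i < current.size() / 2; i++) {
                next.add(f + "(" + current.get(2 * i) + ", " + current.get(2 * i + 1) + ")");
            }
            if (current.size() % 2 != 0) {
                next.add(current.get(current.size() - 1));
            }
            current = next;
        }
        return current.size() == 1 ? current.get(0)
                : f + "(" + current.get(0) + ", " + current.get(1) + ")";
    }

    public static void main(String[] args) {
        List<String> in = new ArrayList<String>();
        in.add("a"); in.add("b"); in.add("c"); in.add("d"); in.add("e");
        System.out.println(fold("and", in)); // and(and(and(a, b), and(c, d)), e)
    }
}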

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
index 5f6a5dc..35560b6 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
@@ -9,9 +9,9 @@
 

 public class FieldExpressionEvaluator extends AbstractExpressionEvaluator {

 

-	public FieldExpressionEvaluator(ExprNodeFieldDesc expr, ObjectInspector oi,

-			IDataOutputProvider output) throws AlgebricksException {

-		super(new ExprNodeFieldEvaluator(expr), oi, output);

-	}

+    public FieldExpressionEvaluator(ExprNodeFieldDesc expr, ObjectInspector oi, IDataOutputProvider output)

+            throws AlgebricksException {

+        super(new ExprNodeFieldEvaluator(expr), oi, output);

+    }

 

 }
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
index c3f3c93..7ffec7a 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
@@ -9,10 +9,9 @@
 

 public class FunctionExpressionEvaluator extends AbstractExpressionEvaluator {

 

-	public FunctionExpressionEvaluator(ExprNodeGenericFuncDesc expr,

-			ObjectInspector oi, IDataOutputProvider output)

-			throws AlgebricksException {

-		super(new ExprNodeGenericFuncEvaluator(expr), oi, output);

-	}

+    public FunctionExpressionEvaluator(ExprNodeGenericFuncDesc expr, ObjectInspector oi, IDataOutputProvider output)

+            throws AlgebricksException {

+        super(new ExprNodeGenericFuncEvaluator(expr), oi, output);

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
index cbe5561..ca60385 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
@@ -9,8 +9,8 @@
 

 public class NullExpressionEvaluator extends AbstractExpressionEvaluator {

 

-	public NullExpressionEvaluator(ExprNodeNullDesc expr, ObjectInspector oi,

-			IDataOutputProvider output) throws AlgebricksException {

-		super(new ExprNodeNullEvaluator(expr), oi, output);

-	}

+    public NullExpressionEvaluator(ExprNodeNullDesc expr, ObjectInspector oi, IDataOutputProvider output)

+            throws AlgebricksException {

+        super(new ExprNodeNullEvaluator(expr), oi, output);

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
index 328b384..676989e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
@@ -7,10 +7,10 @@
 
 public interface SerializableBuffer extends AggregationBuffer {
 
-	public void deSerializeAggBuffer(byte[] data, int start, int len);
+    public void deSerializeAggBuffer(byte[] data, int start, int len);
 
-	public void serializeAggBuffer(byte[] data, int start, int len);
+    public void serializeAggBuffer(byte[] data, int start, int len);
 
-	public void serializeAggBuffer(DataOutput output) throws IOException;
+    public void serializeAggBuffer(DataOutput output) throws IOException;
 
 }
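
A minimal sketch of an implementation of this interface, assuming a buffer whose whole state is one running sum; AggregationBuffer itself is a marker interface, so only these three methods need bodies:

import java.io.DataOutput;
import java.io.IOException;

// hypothetical buffer whose entire aggregation state is one double
public class SumBuffer implements SerializableBuffer {
    double sum;

    @Override
    public void deSerializeAggBuffer(byte[] data, int start, int len) {
        sum = BufferSerDeUtil.getDouble(data, start);
    }

    @Override
    public void serializeAggBuffer(byte[] data, int start, int len) {
        BufferSerDeUtil.writeDouble(sum, data, start);
    }

    @Override
    public void serializeAggBuffer(DataOutput output) throws IOException {
        output.writeDouble(sum); // same big-endian layout as the byte[] form
    }
}
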
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
index de0141b..284d3d2 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
@@ -23,125 +23,121 @@
 

 public class UDTFFunctionEvaluator implements ICopyUnnestingFunction, Collector {

 

-	/**

-	 * udtf function

-	 */

-	private UDTFDesc func;

+    /**

+     * udtf function

+     */

+    private UDTFDesc func;

 

-	/**

-	 * input object inspector

-	 */

-	private ObjectInspector inputInspector;

+    /**

+     * input object inspector

+     */

+    private ObjectInspector inputInspector;

 

-	/**

-	 * output object inspector

-	 */

-	private ObjectInspector outputInspector;

+    /**

+     * output object inspector

+     */

+    private ObjectInspector outputInspector;

 

-	/**

-	 * object inspector for udtf

-	 */

-	private ObjectInspector[] udtfInputOIs;

+    /**

+     * object inspector for udtf

+     */

+    private ObjectInspector[] udtfInputOIs;

 

-	/**

-	 * generic udtf

-	 */

-	private GenericUDTF udtf;

+    /**

+     * generic udtf

+     */

+    private GenericUDTF udtf;

 

-	/**

-	 * data output

-	 */

-	private DataOutput out;

+    /**

+     * data output

+     */

+    private DataOutput out;

 

-	/**

-	 * the input row object

-	 */

-	private LazyColumnar cachedRowObject;

+    /**

+     * the input row object

+     */

+    private LazyColumnar cachedRowObject;

 

-	/**

-	 * cached row object (input)

-	 */

-	private Object[] cachedInputObjects;

+    /**

+     * cached row object (input)

+     */

+    private Object[] cachedInputObjects;

 

-	/**

-	 * serialization/deserialization

-	 */

-	private SerDe lazySerDe;

+    /**

+     * serialization/deserialization

+     */

+    private SerDe lazySerDe;

 

-	/**

-	 * columns feed into UDTF

-	 */

-	private int[] columns;

+    /**

+     * columns fed into the UDTF

+     */

+    private int[] columns;

 

-	public UDTFFunctionEvaluator(UDTFDesc desc, Schema schema, int[] cols,

-			DataOutput output) {

-		this.func = desc;

-		this.inputInspector = schema.toObjectInspector();

-		udtf = func.getGenericUDTF();

-		out = output;

-		columns = cols;

-	}

+    public UDTFFunctionEvaluator(UDTFDesc desc, Schema schema, int[] cols, DataOutput output) {

+        this.func = desc;

+        this.inputInspector = schema.toObjectInspector();

+        udtf = func.getGenericUDTF();

+        out = output;

+        columns = cols;

+    }

 

-	@Override

-	public void init(IFrameTupleReference tuple) throws AlgebricksException {

-		cachedInputObjects = new LazyObject[columns.length];

-		try {

-			cachedRowObject = (LazyColumnar) LazyFactory

-					.createLazyObject(inputInspector);

-			outputInspector = udtf.initialize(udtfInputOIs);

-		} catch (HiveException e) {

-			throw new AlgebricksException(e);

-		}

-		udtf.setCollector(this);

-		lazySerDe = new LazySerDe();

-		readIntoCache(tuple);

-	}

+    @Override

+    public void init(IFrameTupleReference tuple) throws AlgebricksException {

+        cachedInputObjects = new LazyObject[columns.length];

+        try {

+            cachedRowObject = (LazyColumnar) LazyFactory.createLazyObject(inputInspector);

+            outputInspector = udtf.initialize(udtfInputOIs);

+        } catch (HiveException e) {

+            throw new AlgebricksException(e);

+        }

+        udtf.setCollector(this);

+        lazySerDe = new LazySerDe();

+        readIntoCache(tuple);

+    }

 

-	@Override

-	public boolean step() throws AlgebricksException {

-		try {

-			udtf.process(cachedInputObjects);

-			return true;

-		} catch (HiveException e) {

-			throw new AlgebricksException(e);

-		}

-	}

+    @Override

+    public boolean step() throws AlgebricksException {

+        try {

+            udtf.process(cachedInputObjects);

+            return true;

+        } catch (HiveException e) {

+            throw new AlgebricksException(e);

+        }

+    }

 

-	/**

-	 * bind the tuple reference to the cached row object

-	 * 

-	 * @param r

-	 */

-	private void readIntoCache(IFrameTupleReference r) {

-		cachedRowObject.init(r);

-		for (int i = 0; i < cachedInputObjects.length; i++) {

-			cachedInputObjects[i] = cachedRowObject.getField(columns[i]);

-		}

-	}

+    /**

+     * bind the tuple reference to the cached row object

+     * 

+     * @param r

+     */

+    private void readIntoCache(IFrameTupleReference r) {

+        cachedRowObject.init(r);

+        for (int i = 0; i < cachedInputObjects.length; i++) {

+            cachedInputObjects[i] = cachedRowObject.getField(columns[i]);

+        }

+    }

 

-	/**

-	 * serialize the result

-	 * 

-	 * @param result

-	 *            the evaluation result

-	 * @throws IOException

-	 * @throws AlgebricksException

-	 */

-	private void serializeResult(Object result) throws SerDeException,

-			IOException {

-		BytesWritable outputWritable = (BytesWritable) lazySerDe.serialize(

-				result, outputInspector);

-		out.write(outputWritable.getBytes(), 0, outputWritable.getLength());

-	}

+    /**

+     * serialize the result

+     * 

+     * @param result

+     *            the evaluation result

+     * @throws IOException

+     * @throws AlgebricksException

+     */

+    private void serializeResult(Object result) throws SerDeException, IOException {

+        BytesWritable outputWritable = (BytesWritable) lazySerDe.serialize(result, outputInspector);

+        out.write(outputWritable.getBytes(), 0, outputWritable.getLength());

+    }

 

-	@Override

-	public void collect(Object input) throws HiveException {

-		try {

-			serializeResult(input);

-		} catch (IOException e) {

-			throw new HiveException(e);

-		} catch (SerDeException e) {

-			throw new HiveException(e);

-		}

-	}

+    @Override

+    public void collect(Object input) throws HiveException {

+        try {

+            serializeResult(input);

+        } catch (IOException e) {

+            throw new HiveException(e);

+        } catch (SerDeException e) {

+            throw new HiveException(e);

+        }

+    }

 }
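
The evaluator above is both the driver and the sink: step() feeds a row to the UDTF, and the UDTF pushes each output row back through collect(). A toy sketch of that inversion of control, with a counting collector standing in for the serializing one above (the UDTF is assumed to be initialize()-d already):

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.Collector;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

// toy sink: the UDTF pushes each output row here during process()
class CountingCollector implements Collector {
    long rows;

    @Override
    public void collect(Object input) throws HiveException {
        rows++; // the real evaluator serializes 'input' to its DataOutput instead
    }
}

class UdtfDriverSketch {
    static void drive(GenericUDTF udtf, Object[][] inputRows) throws HiveException {
        CountingCollector sink = new CountingCollector();
        udtf.setCollector(sink);
        for (Object[] row : inputRows) {
            udtf.process(row); // may call sink.collect() zero or more times
        }
        udtf.close();
    }
}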

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
index 8f4c471..c64a39b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
@@ -7,19 +7,19 @@
 

 public interface IExecutionEngine {

 

-	/**

-	 * compile the job

-	 * 

-	 * @param rootTasks

-	 *            : Hive MapReduce plan

-	 * @return 0 pass, 1 fail

-	 */

-	public int compileJob(List<Task<? extends Serializable>> rootTasks);

+    /**

+     * compile the job

+     * 

+     * @param rootTasks

+     *            : Hive MapReduce plan

+     * @return 0 pass, 1 fail

+     */

+    public int compileJob(List<Task<? extends Serializable>> rootTasks);

 

-	/**

-	 * execute the job with latest compiled plan

-	 * 

-	 * @return

-	 */

-	public int executeJob();

+    /**

+     * execute the job with the latest compiled plan

+     * 

+     * @return 0 pass, 1 fail

+     */

+    public int executeJob();

 }
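
A minimal sketch of how a caller would drive this contract; the engine implementation is assumed to come from elsewhere in the codebase:

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;

public class EngineDriverSketch {
    // compile the Hive MapReduce plan, then run the latest compiled job;
    // both calls use the 0 = pass, 1 = fail convention documented above
    static int runPlan(IExecutionEngine engine, List<Task<? extends Serializable>> rootTasks) {
        if (engine.compileJob(rootTasks) != 0) {
            return 1; // compilation failed, nothing to execute
        }
        return engine.executeJob();
    }
}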

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
index 9c2d463..f3b76e4 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
@@ -3,34 +3,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveByteBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveByteBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveByteBinaryAscComparatorFactory INSTANCE = new HiveByteBinaryAscComparatorFactory();

+    public static HiveByteBinaryAscComparatorFactory INSTANCE = new HiveByteBinaryAscComparatorFactory();

 

-	private HiveByteBinaryAscComparatorFactory() {

-	}

+    private HiveByteBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private byte left;

-			private byte right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private byte left;

+            private byte right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = b1[s1];

-				right = b2[s2];

-				if (left > right)

-					return 1;

-				else if (left == right)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = b1[s1];

+                right = b2[s2];

+                if (left > right)

+                    return 1;

+                else if (left == right)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 

 }
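
These classes come in factory/comparator pairs because the factory is Serializable and ships with the compiled job, while the comparator it creates does the per-call work on a worker. A small usage sketch against the ascending byte factory (values are arbitrary):

import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveByteBinaryAscComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

public class ComparatorUsageSketch {
    public static void main(String[] args) {
        // the factory travels with the job; workers create the comparator locally
        IBinaryComparator cmp = HiveByteBinaryAscComparatorFactory.INSTANCE.createBinaryComparator();
        byte[] a = { 3 };
        byte[] b = { 7 };
        System.out.println(cmp.compare(a, 0, 1, b, 0, 1)); // -1: 3 sorts before 7
    }
}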

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
index ee71655..8d452dc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
@@ -3,33 +3,31 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveByteBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveByteBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveByteBinaryDescComparatorFactory INSTANCE = new HiveByteBinaryDescComparatorFactory();

+    public static HiveByteBinaryDescComparatorFactory INSTANCE = new HiveByteBinaryDescComparatorFactory();

 

-	private HiveByteBinaryDescComparatorFactory() {

-	}

+    private HiveByteBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private byte left;

-			private byte right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private byte left;

+            private byte right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = b1[s1];

-				right = b2[s2];

-				if (left > right)

-					return -1;

-				else if (left == right)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = b1[s1];

+                right = b2[s2];

+                if (left > right)

+                    return -1;

+                else if (left == right)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
index 739e417..0b5350a 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
@@ -4,36 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveDoubleBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveDoubleBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveDoubleBinaryAscComparatorFactory INSTANCE = new HiveDoubleBinaryAscComparatorFactory();

+    public static HiveDoubleBinaryAscComparatorFactory INSTANCE = new HiveDoubleBinaryAscComparatorFactory();

 

-	private HiveDoubleBinaryAscComparatorFactory() {

-	}

+    private HiveDoubleBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private double left;

-			private double right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private double left;

+            private double right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = Double.longBitsToDouble(LazyUtils

-						.byteArrayToLong(b1, s1));

-				right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2,

-						s2));

-				if (left > right)

-					return 1;

-				else if (left == right)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b1, s1));

+                right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2, s2));

+                if (left > right)

+                    return 1;

+                else if (left == right)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 

 }
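
One caveat about the floating-point comparators: left > right and left == right are both false when either operand is NaN, so the ascending comparator returns -1 whichever side the NaN is on, and NaN is therefore not totally ordered here (unlike Double.compare). A sketch that makes the edge case visible; the encoder is an assumed stand-in for the big-endian layout LazyUtils.byteArrayToLong reads:

import java.nio.ByteBuffer;

import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveDoubleBinaryAscComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

public class DoubleComparatorNaNSketch {
    // 8 big-endian bytes of the IEEE-754 bit pattern
    static byte[] enc(double d) {
        return ByteBuffer.allocate(8).putDouble(d).array();
    }

    public static void main(String[] args) {
        IBinaryComparator cmp = HiveDoubleBinaryAscComparatorFactory.INSTANCE.createBinaryComparator();
        // both directions answer -1, so NaN never compares equal to anything
        System.out.println(cmp.compare(enc(Double.NaN), 0, 8, enc(1.0), 0, 8)); // -1
        System.out.println(cmp.compare(enc(1.0), 0, 8, enc(Double.NaN), 0, 8)); // -1
    }
}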

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
index 0424c9f..2405956 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
@@ -4,36 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveDoubleBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveDoubleBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveDoubleBinaryDescComparatorFactory INSTANCE = new HiveDoubleBinaryDescComparatorFactory();

+    public static HiveDoubleBinaryDescComparatorFactory INSTANCE = new HiveDoubleBinaryDescComparatorFactory();

 

-	private HiveDoubleBinaryDescComparatorFactory() {

-	}

+    private HiveDoubleBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private double left;

-			private double right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private double left;

+            private double right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = Double.longBitsToDouble(LazyUtils

-						.byteArrayToLong(b1, s1));

-				right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2,

-						s2));

-				if (left > right)

-					return -1;

-				else if (left == right)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b1, s1));

+                right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2, s2));

+                if (left > right)

+                    return -1;

+                else if (left == right)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
index 08542a7..05a43e6 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
@@ -4,34 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveFloatBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveFloatBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveFloatBinaryAscComparatorFactory INSTANCE = new HiveFloatBinaryAscComparatorFactory();

+    public static HiveFloatBinaryAscComparatorFactory INSTANCE = new HiveFloatBinaryAscComparatorFactory();

 

-	private HiveFloatBinaryAscComparatorFactory() {

-	}

+    private HiveFloatBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private float left;

-			private float right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private float left;

+            private float right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

-				right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

-				if (left > right)

-					return 1;

-				else if (left == right)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

+                right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

+                if (left > right)

+                    return 1;

+                else if (left == right)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
index 513512e..2c44f97 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
@@ -4,34 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveFloatBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveFloatBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveFloatBinaryDescComparatorFactory INSTANCE = new HiveFloatBinaryDescComparatorFactory();

+    public static HiveFloatBinaryDescComparatorFactory INSTANCE = new HiveFloatBinaryDescComparatorFactory();

 

-	private HiveFloatBinaryDescComparatorFactory() {

-	}

+    private HiveFloatBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private float left;

-			private float right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private float left;

+            private float right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

-				right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

-				if (left > right)

-					return -1;

-				else if (left == right)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

+                right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

+                if (left > right)

+                    return -1;

+                else if (left == right)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
index 947f30f..0127791 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
@@ -5,40 +5,36 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveIntegerBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveIntegerBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static final HiveIntegerBinaryAscComparatorFactory INSTANCE = new HiveIntegerBinaryAscComparatorFactory();

+    public static final HiveIntegerBinaryAscComparatorFactory INSTANCE = new HiveIntegerBinaryAscComparatorFactory();

 

-	private HiveIntegerBinaryAscComparatorFactory() {

-	}

+    private HiveIntegerBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VInt left = new VInt();

-			private VInt right = new VInt();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VInt left = new VInt();

+            private VInt right = new VInt();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVInt(b1, s1, left);

-				LazyUtils.readVInt(b2, s2, right);

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVInt(b1, s1, left);

+                LazyUtils.readVInt(b2, s2, right);

 

-				if (left.length != l1 || right.length != l2)

-					throw new IllegalArgumentException(

-							"length mismatch in int comparator function actual: "

-									+ left.length + "," + right.length

-									+ " expected " + l1 + "," + l2);

+                if (left.length != l1 || right.length != l2)

+                    throw new IllegalArgumentException("length mismatch in int comparator function, actual: "

+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);

 

-				if (left.value > right.value)

-					return 1;

-				else if (left.value == right.value)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+                if (left.value > right.value)

+                    return 1;

+                else if (left.value == right.value)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 }
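
The integer comparators above decode a variable-length VInt and cross-check the decoded byte count against the field length before comparing values. A hypothetical decoder illustrating the idea; the wire format here (unsigned little-endian base-128) is an assumption, as the real encoding lives in LazyUtils.readVInt, which this patch does not show:

    final class VIntSketch {
        int value;  // decoded integer
        int length; // bytes consumed, cross-checked against l1/l2 above

        static VIntSketch read(byte[] buf, int off) {
            VIntSketch v = new VIntSketch();
            int shift = 0;
            int b;
            do {
                b = buf[off + v.length++] & 0xFF;  // low 7 bits carry payload
                v.value |= (b & 0x7F) << shift;
                shift += 7;
            } while ((b & 0x80) != 0);             // high bit set means "more bytes follow"
            return v;
        }
    }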

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
index 7614aa1..5116337 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
@@ -5,37 +5,34 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveIntegerBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveIntegerBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static final HiveIntegerBinaryDescComparatorFactory INSTANCE = new HiveIntegerBinaryDescComparatorFactory();

+    public static final HiveIntegerBinaryDescComparatorFactory INSTANCE = new HiveIntegerBinaryDescComparatorFactory();

 

-	private HiveIntegerBinaryDescComparatorFactory() {

-	}

+    private HiveIntegerBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VInt left = new VInt();

-			private VInt right = new VInt();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VInt left = new VInt();

+            private VInt right = new VInt();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVInt(b1, s1, left);

-				LazyUtils.readVInt(b2, s2, right);

-				if (left.length != l1 || right.length != l2)

-					throw new IllegalArgumentException(

-							"length mismatch in int comparator function actual: "

-									+ left.length + " expected " + l1);

-				if (left.value > right.value)

-					return -1;

-				else if (left.value == right.value)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVInt(b1, s1, left);

+                LazyUtils.readVInt(b2, s2, right);

+                if (left.length != l1 || right.length != l2)

+                    throw new IllegalArgumentException("length mismatch in int comparator function, actual: "

+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);

+                if (left.value > right.value)

+                    return -1;

+                else if (left.value == right.value)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
index f5f3473..fa416a9 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
@@ -5,37 +5,34 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveLongBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveLongBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static final HiveLongBinaryAscComparatorFactory INSTANCE = new HiveLongBinaryAscComparatorFactory();

+    public static final HiveLongBinaryAscComparatorFactory INSTANCE = new HiveLongBinaryAscComparatorFactory();

 

-	private HiveLongBinaryAscComparatorFactory() {

-	}

+    private HiveLongBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VLong left = new VLong();

-			private VLong right = new VLong();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VLong left = new VLong();

+            private VLong right = new VLong();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVLong(b1, s1, left);

-				LazyUtils.readVLong(b2, s2, right);

-				if (left.length != l1 || right.length != l2)

-					throw new IllegalArgumentException(

-							"length mismatch in int comparator function actual: "

-									+ left.length + " expected " + l1);

-				if (left.value > right.value)

-					return 1;

-				else if (left.value == right.value)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVLong(b1, s1, left);

+                LazyUtils.readVLong(b2, s2, right);

+                if (left.length != l1 || right.length != l2)

+                    throw new IllegalArgumentException("length mismatch in long comparator function, actual: "

+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);

+                if (left.value > right.value)

+                    return 1;

+                else if (left.value == right.value)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
index b878b22..e72dc62 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
@@ -5,37 +5,34 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveLongBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveLongBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static final HiveLongBinaryDescComparatorFactory INSTANCE = new HiveLongBinaryDescComparatorFactory();

+    public static final HiveLongBinaryDescComparatorFactory INSTANCE = new HiveLongBinaryDescComparatorFactory();

 

-	private HiveLongBinaryDescComparatorFactory() {

-	}

+    private HiveLongBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VLong left = new VLong();

-			private VLong right = new VLong();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VLong left = new VLong();

+            private VLong right = new VLong();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVLong(b1, s1, left);

-				LazyUtils.readVLong(b2, s2, right);

-				if (left.length != l1 || right.length != l2)

-					throw new IllegalArgumentException(

-							"length mismatch in int comparator function actual: "

-									+ left.length + " expected " + l1);

-				if (left.value > right.value)

-					return -1;

-				else if (left.value == right.value)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVLong(b1, s1, left);

+                LazyUtils.readVLong(b2, s2, right);

+                if (left.length != l1 || right.length != l2)

+                    throw new IllegalArgumentException("length mismatch in long comparator function, actual: "

+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);

+                if (left.value > right.value)

+                    return -1;

+                else if (left.value == right.value)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 }
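
Each Desc factory in this patch duplicates its Asc twin with the return values flipped. A hypothetical adapter (not part of this patch) that would fold the Desc classes into one; swapping the argument order, rather than negating the result, also sidesteps overflow if a delegate ever returned Integer.MIN_VALUE:

    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

    public class DescendingComparatorFactory implements IBinaryComparatorFactory {
        private static final long serialVersionUID = 1L;

        private final IBinaryComparatorFactory ascFactory;

        public DescendingComparatorFactory(IBinaryComparatorFactory ascFactory) {
            this.ascFactory = ascFactory;
        }

        @Override
        public IBinaryComparator createBinaryComparator() {
            final IBinaryComparator asc = ascFactory.createBinaryComparator();
            return new IBinaryComparator() {
                @Override
                public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
                    return asc.compare(b2, s2, l2, b1, s1, l1); // arguments swapped
                }
            };
        }
    }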

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
index 8d55cdb..a3745fa 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
@@ -4,34 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveShortBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveShortBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveShortBinaryAscComparatorFactory INSTANCE = new HiveShortBinaryAscComparatorFactory();

+    public static HiveShortBinaryAscComparatorFactory INSTANCE = new HiveShortBinaryAscComparatorFactory();

 

-	private HiveShortBinaryAscComparatorFactory() {

-	}

+    private HiveShortBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private short left;

-			private short right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private short left;

+            private short right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = LazyUtils.byteArrayToShort(b1, s1);

-				right = LazyUtils.byteArrayToShort(b2, s2);

-				if (left > right)

-					return 1;

-				else if (left == right)

-					return 0;

-				else

-					return -1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = LazyUtils.byteArrayToShort(b1, s1);

+                right = LazyUtils.byteArrayToShort(b2, s2);

+                if (left > right)

+                    return 1;

+                else if (left == right)

+                    return 0;

+                else

+                    return -1;

+            }

+        };

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
index 4e8dde6..44d3f43 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
@@ -4,34 +4,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveShortBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveShortBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveShortBinaryDescComparatorFactory INSTANCE = new HiveShortBinaryDescComparatorFactory();

+    public static HiveShortBinaryDescComparatorFactory INSTANCE = new HiveShortBinaryDescComparatorFactory();

 

-	private HiveShortBinaryDescComparatorFactory() {

-	}

+    private HiveShortBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private short left;

-			private short right;

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private short left;

+            private short right;

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				left = LazyUtils.byteArrayToShort(b1, s1);

-				right = LazyUtils.byteArrayToShort(b2, s2);

-				if (left > right)

-					return -1;

-				else if (left == right)

-					return 0;

-				else

-					return 1;

-			}

-		};

-	}

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                left = LazyUtils.byteArrayToShort(b1, s1);

+                right = LazyUtils.byteArrayToShort(b2, s2);

+                if (left > right)

+                    return -1;

+                else if (left == right)

+                    return 0;

+                else

+                    return 1;

+            }

+        };

+    }

 

 }
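
A quick usage sketch for the comparator factories above (not part of the patch); the two-byte big-endian layout for shorts is an assumption, since LazyUtils.byteArrayToShort is not shown here:

    import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryAscComparatorFactory;
    import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryDescComparatorFactory;

    public class ShortComparatorDemo {
        public static void main(String[] args) {
            byte[] a = { 0x00, 0x05 }; // 5, assuming big-endian layout
            byte[] b = { 0x00, 0x09 }; // 9
            int asc = HiveShortBinaryAscComparatorFactory.INSTANCE
                    .createBinaryComparator().compare(a, 0, 2, b, 0, 2);
            int desc = HiveShortBinaryDescComparatorFactory.INSTANCE
                    .createBinaryComparator().compare(a, 0, 2, b, 0, 2);
            System.out.println(asc);  // -1: 5 sorts before 9 ascending
            System.out.println(desc); //  1: 5 sorts after 9 descending
        }
    }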

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
index a334ecf..6da9716 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
@@ -7,40 +7,34 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveStringBinaryAscComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveStringBinaryAscComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveStringBinaryAscComparatorFactory INSTANCE = new HiveStringBinaryAscComparatorFactory();

+    public static HiveStringBinaryAscComparatorFactory INSTANCE = new HiveStringBinaryAscComparatorFactory();

 

-	private HiveStringBinaryAscComparatorFactory() {

-	}

+    private HiveStringBinaryAscComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VInt leftLen = new VInt();

-			private VInt rightLen = new VInt();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VInt leftLen = new VInt();

+            private VInt rightLen = new VInt();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVInt(b1, s1, leftLen);

-				LazyUtils.readVInt(b2, s2, rightLen);

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVInt(b1, s1, leftLen);

+                LazyUtils.readVInt(b2, s2, rightLen);

 

-				if (leftLen.value + leftLen.length != l1

-						|| rightLen.value + rightLen.length != l2)

-					throw new IllegalStateException(

-							"parse string: length mismatch, expected "

-									+ (leftLen.value + leftLen.length) + ", "

-									+ (rightLen.value + rightLen.length)

-									+ " but get " + l1 + ", " + l2);

+                if (leftLen.value + leftLen.length != l1 || rightLen.value + rightLen.length != l2)

+                    throw new IllegalStateException("parse string: length mismatch, expected "

+                            + (leftLen.value + leftLen.length) + ", " + (rightLen.value + rightLen.length)

+                            + " but get " + l1 + ", " + l2);

 

-				return Text.Comparator.compareBytes(b1, s1 + leftLen.length, l1

-						- leftLen.length, b2, s2 + rightLen.length, l2

-						- rightLen.length);

-			}

-		};

-	}

+                return Text.Comparator.compareBytes(b1, s1 + leftLen.length, l1 - leftLen.length, b2, s2

+                        + rightLen.length, l2 - rightLen.length);

+            }

+        };

+    }

 

 }
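
The string comparator above implies the field layout [VInt length][payload bytes], with l1/l2 covering prefix plus payload; after validating the lengths it compares the payloads byte-wise. A self-contained sketch of the unsigned lexicographic comparison that Hadoop's compareBytes helpers perform (the helper internals themselves are not shown in this patch):

    final class BytewiseCompare {
        static int compare(byte[] a, int aOff, int aLen, byte[] b, int bOff, int bLen) {
            int n = Math.min(aLen, bLen);
            for (int i = 0; i < n; i++) {
                int x = a[aOff + i] & 0xFF; // unsigned view; signed bytes would
                int y = b[bOff + i] & 0xFF; // misorder values above 0x7F
                if (x != y) {
                    return x - y;
                }
            }
            return aLen - bLen; // equal prefix: shorter sorts first
        }
    }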

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
index e00b58e..c579711 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
@@ -7,39 +7,33 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

 

-public class HiveStringBinaryDescComparatorFactory implements

-		IBinaryComparatorFactory {

-	private static final long serialVersionUID = 1L;

+public class HiveStringBinaryDescComparatorFactory implements IBinaryComparatorFactory {

+    private static final long serialVersionUID = 1L;

 

-	public static HiveStringBinaryDescComparatorFactory INSTANCE = new HiveStringBinaryDescComparatorFactory();

+    public static HiveStringBinaryDescComparatorFactory INSTANCE = new HiveStringBinaryDescComparatorFactory();

 

-	private HiveStringBinaryDescComparatorFactory() {

-	}

+    private HiveStringBinaryDescComparatorFactory() {

+    }

 

-	@Override

-	public IBinaryComparator createBinaryComparator() {

-		return new IBinaryComparator() {

-			private VInt leftLen = new VInt();

-			private VInt rightLen = new VInt();

+    @Override

+    public IBinaryComparator createBinaryComparator() {

+        return new IBinaryComparator() {

+            private VInt leftLen = new VInt();

+            private VInt rightLen = new VInt();

 

-			@Override

-			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

-					int l2) {

-				LazyUtils.readVInt(b1, s1, leftLen);

-				LazyUtils.readVInt(b2, s2, rightLen);

+            @Override

+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

+                LazyUtils.readVInt(b1, s1, leftLen);

+                LazyUtils.readVInt(b2, s2, rightLen);

 

-				if (leftLen.value + leftLen.length != l1

-						|| rightLen.value + rightLen.length != l2)

-					throw new IllegalStateException(

-							"parse string: length mismatch, expected "

-									+ (leftLen.value + leftLen.length) + ", "

-									+ (rightLen.value + rightLen.length)

-									+ " but get " + l1 + ", " + l2);

+                if (leftLen.value + leftLen.length != l1 || rightLen.value + rightLen.length != l2)

+                    throw new IllegalStateException("parse string: length mismatch, expected "

+                            + (leftLen.value + leftLen.length) + ", " + (rightLen.value + rightLen.length)

+                            + " but get " + l1 + ", " + l2);

 

-				return -WritableComparator.compareBytes(b1,

-						s1 + leftLen.length, l1 - leftLen.length, b2, s2

-								+ rightLen.length, l2 - rightLen.length);

-			}

-		};

-	}

+                return -WritableComparator.compareBytes(b1, s1 + leftLen.length, l1 - leftLen.length, b2, s2

+                        + rightLen.length, l2 - rightLen.length);

+            }

+        };

+    }

 }
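
The descending variant above negates the ascending result. That is safe here because compareBytes returns either a difference of two unsigned bytes or a difference of two lengths, never Integer.MIN_VALUE; a defensive sketch that does not rely on that range at all:

    final class DescendingResult {
        static int descending(int ascResult) {
            // Integer.signum clamps to {-1, 0, 1} before negation, so even an
            // ascending result of Integer.MIN_VALUE could not overflow.
            return -Integer.signum(ascResult);
        }
    }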

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
index c6078ca..99cf758 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
@@ -32,350 +32,336 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunctionFactory;

 import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

 

-public class AggregationFunctionFactory implements

-		ICopyAggregateFunctionFactory {

+public class AggregationFunctionFactory implements ICopyAggregateFunctionFactory {

 

-	private static final long serialVersionUID = 1L;

+    private static final long serialVersionUID = 1L;

 

-	/**

-	 * list of parameters' serialization

-	 */

-	private List<String> parametersSerialization = new ArrayList<String>();

+    /**

+     * list of parameters' serialization

+     */

+    private List<String> parametersSerialization = new ArrayList<String>();

 

-	/**

-	 * the name of the udf

-	 */

-	private String genericUDAFName;

+    /**

+     * the name of the udf

+     */

+    private String genericUDAFName;

 

-	/**

-	 * aggregation mode

-	 */

-	private GenericUDAFEvaluator.Mode mode;

+    /**

+     * aggregation mode

+     */

+    private GenericUDAFEvaluator.Mode mode;

 

-	/**

-	 * list of type info

-	 */

-	private List<TypeInfo> types = new ArrayList<TypeInfo>();

+    /**

+     * list of type info

+     */

+    private List<TypeInfo> types = new ArrayList<TypeInfo>();

 

-	/**

-	 * distinct or not

-	 */

-	private boolean distinct;

+    /**

+     * distinct or not

+     */

+    private boolean distinct;

 

-	/**

-	 * the schema of incoming rows

-	 */

-	private Schema rowSchema;

+    /**

+     * the schema of incoming rows

+     */

+    private Schema rowSchema;

 

-	/**

-	 * list of parameters

-	 */

-	private transient List<ExprNodeDesc> parametersOrigin;

+    /**

+     * list of parameters

+     */

+    private transient List<ExprNodeDesc> parametersOrigin;

 

-	/**

-	 * row inspector

-	 */

-	private transient ObjectInspector rowInspector = null;

+    /**

+     * row inspector

+     */

+    private transient ObjectInspector rowInspector = null;

 

-	/**

-	 * output object inspector

-	 */

-	private transient ObjectInspector outputInspector = null;

+    /**

+     * output object inspector

+     */

+    private transient ObjectInspector outputInspector = null;

 

-	/**

-	 * output object inspector

-	 */

-	private transient ObjectInspector outputInspectorPartial = null;

+    /**

+     * output object inspector for partial aggregation results

+     */

+    private transient ObjectInspector outputInspectorPartial = null;

 

-	/**

-	 * parameter inspectors

-	 */

-	private transient ObjectInspector[] parameterInspectors = null;

+    /**

+     * parameter inspectors

+     */

+    private transient ObjectInspector[] parameterInspectors = null;

 

-	/**

-	 * expression desc

-	 */

-	private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

+    /**

+     * expression desc

+     */

+    private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

 

-	/**

-	 * evaluators

-	 */

-	private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

+    /**

+     * evaluators

+     */

+    private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

 

-	/**

-	 * cached parameter objects

-	 */

-	private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();

+    /**

+     * cached parameter objects

+     */

+    private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();

 

-	/**

-	 * cached row object: one per thread

-	 */

-	private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

+    /**

+     * cached row object: one per thread

+     */

+    private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

 

-	/**

-	 * we only use lazy serde to do serialization

-	 */

-	private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();

+    /**

+     * we only use lazy serde to do serialization

+     */

+    private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();

 

-	/**

-	 * udaf evaluators

-	 */

-	private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

+    /**

+     * partial-mode udaf evaluators, one per thread

+     */

+    private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

 

-	/**

-	 * udaf evaluators

-	 */

-	private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

+    /**

+     * complete-mode udaf evaluators, one per thread

+     */

+    private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

 

-	/**

-	 * aggregation function desc

-	 */

-	private transient AggregationDesc aggregator;

+    /**

+     * aggregation function desc

+     */

+    private transient AggregationDesc aggregator;

 

-	/**

-	 * 

-	 * @param aggregator

-	 *            Algebricks function call expression

-	 * @param oi

-	 *            schema

-	 */

-	public AggregationFunctionFactory(

-			AggregateFunctionCallExpression expression, Schema oi,

-			IVariableTypeEnvironment env) throws AlgebricksException {

+    /**

+     * @param expression

+     *            Algebricks function call expression

+     * @param oi

+     *            schema

+     */

+    public AggregationFunctionFactory(AggregateFunctionCallExpression expression, Schema oi,

+            IVariableTypeEnvironment env) throws AlgebricksException {

 

-		try {

-			aggregator = (AggregationDesc) ExpressionTranslator

-					.getHiveExpression(expression, env);

-		} catch (Exception e) {

-			e.printStackTrace();

-			throw new AlgebricksException(e.getMessage());

-		}

-		init(aggregator.getParameters(), aggregator.getGenericUDAFName(),

-				aggregator.getMode(), aggregator.getDistinct(), oi);

-	}

+        try {

+            aggregator = (AggregationDesc) ExpressionTranslator.getHiveExpression(expression, env);

+        } catch (Exception e) {

+            e.printStackTrace();

+            throw new AlgebricksException(e.getMessage());

+        }

+        init(aggregator.getParameters(), aggregator.getGenericUDAFName(), aggregator.getMode(),

+                aggregator.getDistinct(), oi);

+    }

 

-	/**

-	 * constructor of aggregation function factory

-	 * 

-	 * @param inputs

-	 * @param name

-	 * @param udafMode

-	 * @param distinct

-	 * @param oi

-	 */

-	private void init(List<ExprNodeDesc> inputs, String name,

-			GenericUDAFEvaluator.Mode udafMode, boolean distinct, Schema oi) {

-		parametersOrigin = inputs;

-		genericUDAFName = name;

-		mode = udafMode;

-		this.distinct = distinct;

-		rowSchema = oi;

+    /**

+     * constructor of aggregation function factory

+     * 

+     * @param inputs

+     * @param name

+     * @param udafMode

+     * @param distinct

+     * @param oi

+     */

+    private void init(List<ExprNodeDesc> inputs, String name, GenericUDAFEvaluator.Mode udafMode, boolean distinct,

+            Schema oi) {

+        parametersOrigin = inputs;

+        genericUDAFName = name;

+        mode = udafMode;

+        this.distinct = distinct;

+        rowSchema = oi;

 

-		for (ExprNodeDesc input : inputs) {

-			TypeInfo type = input.getTypeInfo();

-			if (type instanceof StructTypeInfo) {

-				types.add(TypeInfoFactory.doubleTypeInfo);

-			} else

-				types.add(type);

+        for (ExprNodeDesc input : inputs) {

+            TypeInfo type = input.getTypeInfo();

+            if (type instanceof StructTypeInfo) {

+                types.add(TypeInfoFactory.doubleTypeInfo);

+            } else

+                types.add(type);

 

-			String s = Utilities.serializeExpression(input);

-			parametersSerialization.add(s);

-		}

-	}

+            String s = Utilities.serializeExpression(input);

+            parametersSerialization.add(s);

+        }

+    }

 

-	@Override

-	public synchronized ICopyAggregateFunction createAggregateFunction(

-			IDataOutputProvider provider) throws AlgebricksException {

-		if (parametersOrigin == null) {

-			Configuration config = new Configuration();

-			config.setClassLoader(this.getClass().getClassLoader());

-			/**

-			 * in case of class.forname(...) call in hive code

-			 */

-			Thread.currentThread().setContextClassLoader(

-					this.getClass().getClassLoader());

+    @Override

+    public synchronized ICopyAggregateFunction createAggregateFunction(IDataOutputProvider provider)

+            throws AlgebricksException {

+        if (parametersOrigin == null) {

+            Configuration config = new Configuration();

+            config.setClassLoader(this.getClass().getClassLoader());

+            /**

+             * in case of Class.forName(...) calls in Hive code

+             */

+            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());

 

-			parametersOrigin = new ArrayList<ExprNodeDesc>();

-			for (String serialization : parametersSerialization) {

-				parametersOrigin.add(Utilities.deserializeExpression(

-						serialization, config));

-			}

-		}

+            parametersOrigin = new ArrayList<ExprNodeDesc>();

+            for (String serialization : parametersSerialization) {

+                parametersOrigin.add(Utilities.deserializeExpression(serialization, config));

+            }

+        }

 

-		/**

-		 * exprs

-		 */

-		if (parameterExprs == null)

-			parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

+        /**

+         * exprs

+         */

+        if (parameterExprs == null)

+            parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

 

-		/**

-		 * evaluators

-		 */

-		if (evaluators == null)

-			evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

+        /**

+         * evaluators

+         */

+        if (evaluators == null)

+            evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

 

-		/**

-		 * cached parameter objects

-		 */

-		if (cachedParameters == null)

-			cachedParameters = new HashMap<Long, Object[]>();

+        /**

+         * cached parameter objects

+         */

+        if (cachedParameters == null)

+            cachedParameters = new HashMap<Long, Object[]>();

 

-		/**

-		 * cached row object: one per thread

-		 */

-		if (cachedRowObjects == null)

-			cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

+        /**

+         * cached row object: one per thread

+         */

+        if (cachedRowObjects == null)

+            cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

 

-		/**

-		 * we only use lazy serde to do serialization

-		 */

-		if (serDe == null)

-			serDe = new HashMap<Long, SerDe>();

+        /**

+         * we only use lazy serde to do serialization

+         */

+        if (serDe == null)

+            serDe = new HashMap<Long, SerDe>();

 

-		/**

-		 * UDAF functions

-		 */

-		if (udafsComplete == null)

-			udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

+        /**

+         * UDAF functions

+         */

+        if (udafsComplete == null)

+            udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

 

-		/**

-		 * UDAF functions

-		 */

-		if (udafsPartial == null)

-			udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

+        /**

+         * UDAF functions

+         */

+        if (udafsPartial == null)

+            udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

 

-		if (parameterInspectors == null)

-			parameterInspectors = new ObjectInspector[parametersOrigin.size()];

+        if (parameterInspectors == null)

+            parameterInspectors = new ObjectInspector[parametersOrigin.size()];

 

-		if (rowInspector == null)

-			rowInspector = rowSchema.toObjectInspector();

+        if (rowInspector == null)

+            rowInspector = rowSchema.toObjectInspector();

 

-		// get current thread id

-		long threadId = Thread.currentThread().getId();

+        // get current thread id

+        long threadId = Thread.currentThread().getId();

 

-		/**

-		 * expressions, expressions are thread local

-		 */

-		List<ExprNodeDesc> parameters = parameterExprs.get(threadId);

-		if (parameters == null) {

-			parameters = new ArrayList<ExprNodeDesc>();

-			for (ExprNodeDesc parameter : parametersOrigin)

-				parameters.add(parameter.clone());

-			parameterExprs.put(threadId, parameters);

-		}

+        /**

+         * expressions are thread-local

+         */

+        List<ExprNodeDesc> parameters = parameterExprs.get(threadId);

+        if (parameters == null) {

+            parameters = new ArrayList<ExprNodeDesc>();

+            for (ExprNodeDesc parameter : parametersOrigin)

+                parameters.add(parameter.clone());

+            parameterExprs.put(threadId, parameters);

+        }

 

-		/**

-		 * cached parameter objects

-		 */

-		Object[] cachedParas = cachedParameters.get(threadId);

-		if (cachedParas == null) {

-			cachedParas = new Object[parameters.size()];

-			cachedParameters.put(threadId, cachedParas);

-		}

+        /**

+         * cached parameter objects

+         */

+        Object[] cachedParas = cachedParameters.get(threadId);

+        if (cachedParas == null) {

+            cachedParas = new Object[parameters.size()];

+            cachedParameters.put(threadId, cachedParas);

+        }

 

-		/**

-		 * cached row object: one per thread

-		 */

-		LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects

-				.get(threadId);

-		if (cachedRowObject == null) {

-			cachedRowObject = LazyFactory.createLazyObject(rowInspector);

-			cachedRowObjects.put(threadId, cachedRowObject);

-		}

+        /**

+         * cached row object: one per thread

+         */

+        LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects.get(threadId);

+        if (cachedRowObject == null) {

+            cachedRowObject = LazyFactory.createLazyObject(rowInspector);

+            cachedRowObjects.put(threadId, cachedRowObject);

+        }

 

-		/**

-		 * we only use lazy serde to do serialization

-		 */

-		SerDe lazySer = serDe.get(threadId);

-		if (lazySer == null) {

-			lazySer = new LazySerDe();

-			serDe.put(threadId, lazySer);

-		}

+        /**

+         * we only use lazy serde to do serialization

+         */

+        SerDe lazySer = serDe.get(threadId);

+        if (lazySer == null) {

+            lazySer = new LazySerDe();

+            serDe.put(threadId, lazySer);

+        }

 

-		/**

-		 * evaluators

-		 */

-		ExprNodeEvaluator[] evals = evaluators.get(threadId);

-		if (evals == null) {

-			evals = new ExprNodeEvaluator[parameters.size()];

-			evaluators.put(threadId, evals);

-		}

+        /**

+         * evaluators

+         */

+        ExprNodeEvaluator[] evals = evaluators.get(threadId);

+        if (evals == null) {

+            evals = new ExprNodeEvaluator[parameters.size()];

+            evaluators.put(threadId, evals);

+        }

 

-		GenericUDAFEvaluator udafPartial;

-		GenericUDAFEvaluator udafComplete;

+        GenericUDAFEvaluator udafPartial;

+        GenericUDAFEvaluator udafComplete;

 

-		// initialize object inspectors

-		try {

-			/**

-			 * evaluators, udf, object inpsectors are shared in one thread

-			 */

-			for (int i = 0; i < evals.length; i++) {

-				if (evals[i] == null) {

-					evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));

-					if (parameterInspectors[i] == null) {

-						parameterInspectors[i] = evals[i]

-								.initialize(rowInspector);

-					} else {

-						evals[i].initialize(rowInspector);

-					}

-				}

-			}

+        // initialize object inspectors

+        try {

+            /**

+             * evaluators, udfs, and object inspectors are shared within one thread

+             */

+            for (int i = 0; i < evals.length; i++) {

+                if (evals[i] == null) {

+                    evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));

+                    if (parameterInspectors[i] == null) {

+                        parameterInspectors[i] = evals[i].initialize(rowInspector);

+                    } else {

+                        evals[i].initialize(rowInspector);

+                    }

+                }

+            }

 

-			udafComplete = udafsComplete.get(threadId);

-			if (udafComplete == null) {

-				try {

-					udafComplete = FunctionRegistry.getGenericUDAFEvaluator(

-							genericUDAFName, types, distinct, false);

-				} catch (HiveException e) {

-					throw new AlgebricksException(e);

-				}

-				udafsComplete.put(threadId, udafComplete);

-				udafComplete.init(mode, parameterInspectors);

-			}

+            udafComplete = udafsComplete.get(threadId);

+            if (udafComplete == null) {

+                try {

+                    udafComplete = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);

+                } catch (HiveException e) {

+                    throw new AlgebricksException(e);

+                }

+                udafsComplete.put(threadId, udafComplete);

+                udafComplete.init(mode, parameterInspectors);

+            }

 

-			// multiple stage group by, determined by the mode parameter

-			if (outputInspector == null)

-				outputInspector = udafComplete.init(mode, parameterInspectors);

+            // multi-stage group-by, determined by the mode parameter

+            if (outputInspector == null)

+                outputInspector = udafComplete.init(mode, parameterInspectors);

 

-			// initial partial gby udaf

-			GenericUDAFEvaluator.Mode partialMode;

-			// adjust mode for external groupby

-			if (mode == GenericUDAFEvaluator.Mode.COMPLETE)

-				partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;

-			else if (mode == GenericUDAFEvaluator.Mode.FINAL)

-				partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;

-			else

-				partialMode = mode;

-			udafPartial = udafsPartial.get(threadId);

-			if (udafPartial == null) {

-				try {

-					udafPartial = FunctionRegistry.getGenericUDAFEvaluator(

-							genericUDAFName, types, distinct, false);

-				} catch (HiveException e) {

-					throw new AlgebricksException(e);

-				}

-				udafPartial.init(partialMode, parameterInspectors);

-				udafsPartial.put(threadId, udafPartial);

-			}

+            // initialize the partial group-by udaf

+            GenericUDAFEvaluator.Mode partialMode;

+            // adjust mode for external groupby

+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE)

+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;

+            else if (mode == GenericUDAFEvaluator.Mode.FINAL)

+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;

+            else

+                partialMode = mode;

+            udafPartial = udafsPartial.get(threadId);

+            if (udafPartial == null) {

+                try {

+                    udafPartial = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);

+                } catch (HiveException e) {

+                    throw new AlgebricksException(e);

+                }

+                udafPartial.init(partialMode, parameterInspectors);

+                udafsPartial.put(threadId, udafPartial);

+            }

 

-			// multiple stage group by, determined by the mode parameter

-			if (outputInspectorPartial == null)

-				outputInspectorPartial = udafPartial.init(partialMode,

-						parameterInspectors);

-		} catch (Exception e) {

-			e.printStackTrace();

-			throw new AlgebricksException(e);

-		}

+            // multi-stage group-by, determined by the mode parameter

+            if (outputInspectorPartial == null)

+                outputInspectorPartial = udafPartial.init(partialMode, parameterInspectors);

+        } catch (Exception e) {

+            e.printStackTrace();

+            throw new AlgebricksException(e);

+        }

 

-		return new AggregationFunctionEvaluator(parameters, types,

-				genericUDAFName, mode, distinct, rowInspector,

-				provider.getDataOutput(), evals, parameterInspectors,

-				cachedParas, lazySer, cachedRowObject, udafPartial,

-				udafComplete, outputInspector, outputInspectorPartial);

-	}

+        return new AggregationFunctionEvaluator(parameters, types, genericUDAFName, mode, distinct, rowInspector,

+                provider.getDataOutput(), evals, parameterInspectors, cachedParas, lazySer, cachedRowObject,

+                udafPartial, udafComplete, outputInspector, outputInspectorPartial);

+    }

 

-	public String toString() {

-		return "aggregation function expression evaluator factory: "

-				+ this.genericUDAFName;

-	}

+    public String toString() {

+        return "aggregation function expression evaluator factory: " + this.genericUDAFName;

+    }

 }
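
The factory above keys all of its per-thread caches by Thread.currentThread().getId() in HashMaps, relying on the synchronized createAggregateFunction call to guard them. A hypothetical ThreadLocal sketch of the same pattern (not the patch's approach); initialValue runs once per thread, so no locking or id bookkeeping is needed, and entries become collectible when their threads die instead of lingering in the maps:

    import java.util.ArrayList;
    import java.util.List;

    public class PerThreadCache {
        private final ThreadLocal<List<Object>> cachedParameters = new ThreadLocal<List<Object>>() {
            @Override
            protected List<Object> initialValue() {
                return new ArrayList<Object>(); // built lazily, once per thread
            }
        };

        public List<Object> get() {
            return cachedParameters.get(); // each thread sees only its own list
        }
    }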

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
index 73717a3..45cfb29 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
@@ -31,351 +31,336 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
 
-public class AggregationFunctionSerializableFactory implements
-		ICopySerializableAggregateFunctionFactory {
+public class AggregationFunctionSerializableFactory implements ICopySerializableAggregateFunctionFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	/**
-	 * list of parameters' serialization
-	 */
-	private List<String> parametersSerialization = new ArrayList<String>();
+    /**
+     * list of parameters' serialization
+     */
+    private List<String> parametersSerialization = new ArrayList<String>();
 
-	/**
-	 * the name of the udf
-	 */
-	private String genericUDAFName;
+    /**
+     * the name of the udf
+     */
+    private String genericUDAFName;
 
-	/**
-	 * aggregation mode
-	 */
-	private GenericUDAFEvaluator.Mode mode;
+    /**
+     * aggregation mode
+     */
+    private GenericUDAFEvaluator.Mode mode;
 
-	/**
-	 * list of type info
-	 */
-	private List<TypeInfo> types = new ArrayList<TypeInfo>();
+    /**
+     * list of type info
+     */
+    private List<TypeInfo> types = new ArrayList<TypeInfo>();
 
-	/**
-	 * distinct or not
-	 */
-	private boolean distinct;
+    /**
+     * distinct or not
+     */
+    private boolean distinct;
 
-	/**
-	 * the schema of incoming rows
-	 */
-	private Schema rowSchema;
+    /**
+     * the schema of incoming rows
+     */
+    private Schema rowSchema;
 
-	/**
-	 * list of parameters
-	 */
-	private transient List<ExprNodeDesc> parametersOrigin;
+    /**
+     * list of parameters
+     */
+    private transient List<ExprNodeDesc> parametersOrigin;
 
-	/**
-	 * row inspector
-	 */
-	private transient ObjectInspector rowInspector = null;
+    /**
+     * row inspector
+     */
+    private transient ObjectInspector rowInspector = null;
 
-	/**
-	 * output object inspector
-	 */
-	private transient ObjectInspector outputInspector = null;
+    /**
+     * output object inspector
+     */
+    private transient ObjectInspector outputInspector = null;
 
-	/**
-	 * output object inspector
-	 */
-	private transient ObjectInspector outputInspectorPartial = null;
+    /**
+     * output object inspector for partial aggregation results
+     */
+    private transient ObjectInspector outputInspectorPartial = null;
 
-	/**
-	 * parameter inspectors
-	 */
-	private transient ObjectInspector[] parameterInspectors = null;
+    /**
+     * parameter inspectors
+     */
+    private transient ObjectInspector[] parameterInspectors = null;
 
-	/**
-	 * expression desc
-	 */
-	private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+    /**
+     * expression desc
+     */
+    private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
 
-	/**
-	 * evaluators
-	 */
-	private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+    /**
+     * evaluators
+     */
+    private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
 
-	/**
-	 * cached parameter objects
-	 */
-	private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();
+    /**
+     * cached parameter objects
+     */
+    private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();
 
-	/**
-	 * cached row object: one per thread
-	 */
-	private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+    /**
+     * cached row object: one per thread
+     */
+    private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
 
-	/**
-	 * we only use lazy serde to do serialization
-	 */
-	private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();
 
-	/**
-	 * udaf evaluators
-	 */
-	private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+    /**
+     * partial-mode udaf evaluators, one per thread
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
 
-	/**
-	 * udaf evaluators
-	 */
-	private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+    /**
+     * complete-mode udaf evaluators, one per thread
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
 
-	/**
-	 * aggregation function desc
-	 */
-	private transient AggregationDesc aggregator;
+    /**
+     * aggregation function desc
+     */
+    private transient AggregationDesc aggregator;
 
-	/**
-	 * 
-	 * @param aggregator
-	 *            Algebricks function call expression
-	 * @param oi
-	 *            schema
-	 */
-	public AggregationFunctionSerializableFactory(
-			AggregateFunctionCallExpression expression, Schema oi,
-			IVariableTypeEnvironment env) throws AlgebricksException {
+    /**
+     * @param expression
+     *            Algebricks function call expression
+     * @param oi
+     *            schema
+     */
+    public AggregationFunctionSerializableFactory(AggregateFunctionCallExpression expression, Schema oi,
+            IVariableTypeEnvironment env) throws AlgebricksException {
 
-		try {
-			aggregator = (AggregationDesc) ExpressionTranslator
-					.getHiveExpression(expression, env);
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e.getMessage());
-		}
-		init(aggregator.getParameters(), aggregator.getGenericUDAFName(),
-				aggregator.getMode(), aggregator.getDistinct(), oi);
-	}
+        try {
+            aggregator = (AggregationDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+        init(aggregator.getParameters(), aggregator.getGenericUDAFName(), aggregator.getMode(),
+                aggregator.getDistinct(), oi);
+    }
 
-	/**
-	 * constructor of aggregation function factory
-	 * 
-	 * @param inputs
-	 * @param name
-	 * @param udafMode
-	 * @param distinct
-	 * @param oi
-	 */
-	private void init(List<ExprNodeDesc> inputs, String name,
-			GenericUDAFEvaluator.Mode udafMode, boolean distinct, Schema oi) {
-		parametersOrigin = inputs;
-		genericUDAFName = name;
-		mode = udafMode;
-		this.distinct = distinct;
-		rowSchema = oi;
+    /**
+     * initializer of the aggregation function factory
+     * 
+     * @param inputs
+     * @param name
+     * @param udafMode
+     * @param distinct
+     * @param oi
+     */
+    private void init(List<ExprNodeDesc> inputs, String name, GenericUDAFEvaluator.Mode udafMode, boolean distinct,
+            Schema oi) {
+        parametersOrigin = inputs;
+        genericUDAFName = name;
+        mode = udafMode;
+        this.distinct = distinct;
+        rowSchema = oi;
 
-		for (ExprNodeDesc input : inputs) {
-			TypeInfo type = input.getTypeInfo();
-			if (type instanceof StructTypeInfo) {
-				types.add(TypeInfoFactory.doubleTypeInfo);
-			} else
-				types.add(type);
+        for (ExprNodeDesc input : inputs) {
+            TypeInfo type = input.getTypeInfo();
+            if (type instanceof StructTypeInfo) {
+                types.add(TypeInfoFactory.doubleTypeInfo);
+            } else
+                types.add(type);
 
-			String s = Utilities.serializeExpression(input);
-			parametersSerialization.add(s);
-		}
-	}
+            String s = Utilities.serializeExpression(input);
+            parametersSerialization.add(s);
+        }
+    }
 
-	@Override
-	public synchronized ICopySerializableAggregateFunction createAggregateFunction()
-			throws AlgebricksException {
-		if (parametersOrigin == null) {
-			Configuration config = new Configuration();
-			config.setClassLoader(this.getClass().getClassLoader());
-			/**
-			 * in case of class.forname(...) call in hive code
-			 */
-			Thread.currentThread().setContextClassLoader(
-					this.getClass().getClassLoader());
+    @Override
+    public synchronized ICopySerializableAggregateFunction createAggregateFunction() throws AlgebricksException {
+        if (parametersOrigin == null) {
+            Configuration config = new Configuration();
+            config.setClassLoader(this.getClass().getClassLoader());
+            /**
+             * in case of Class.forName(...) calls in hive code
+             */
+            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
 
-			parametersOrigin = new ArrayList<ExprNodeDesc>();
-			for (String serialization : parametersSerialization) {
-				parametersOrigin.add(Utilities.deserializeExpression(
-						serialization, config));
-			}
-		}
+            parametersOrigin = new ArrayList<ExprNodeDesc>();
+            for (String serialization : parametersSerialization) {
+                parametersOrigin.add(Utilities.deserializeExpression(serialization, config));
+            }
+        }
 
-		/**
-		 * exprs
-		 */
-		if (parameterExprs == null)
-			parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+        /**
+         * exprs
+         */
+        if (parameterExprs == null)
+            parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
 
-		/**
-		 * evaluators
-		 */
-		if (evaluators == null)
-			evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+        /**
+         * evaluators
+         */
+        if (evaluators == null)
+            evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
 
-		/**
-		 * cached parameter objects
-		 */
-		if (cachedParameters == null)
-			cachedParameters = new HashMap<Long, Object[]>();
+        /**
+         * cached parameter objects
+         */
+        if (cachedParameters == null)
+            cachedParameters = new HashMap<Long, Object[]>();
 
-		/**
-		 * cached row object: one per thread
-		 */
-		if (cachedRowObjects == null)
-			cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+        /**
+         * cached row object: one per thread
+         */
+        if (cachedRowObjects == null)
+            cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
 
-		/**
-		 * we only use lazy serde to do serialization
-		 */
-		if (serDe == null)
-			serDe = new HashMap<Long, SerDe>();
+        /**
+         * we only use lazy serde to do serialization
+         */
+        if (serDe == null)
+            serDe = new HashMap<Long, SerDe>();
 
-		/**
-		 * UDAF functions
-		 */
-		if (udafsComplete == null)
-			udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+        /**
+         * UDAF functions
+         */
+        if (udafsComplete == null)
+            udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
 
-		/**
-		 * UDAF functions
-		 */
-		if (udafsPartial == null)
-			udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+        /**
+         * UDAF functions
+         */
+        if (udafsPartial == null)
+            udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
 
-		if (parameterInspectors == null)
-			parameterInspectors = new ObjectInspector[parametersOrigin.size()];
+        if (parameterInspectors == null)
+            parameterInspectors = new ObjectInspector[parametersOrigin.size()];
 
-		if (rowInspector == null)
-			rowInspector = rowSchema.toObjectInspector();
+        if (rowInspector == null)
+            rowInspector = rowSchema.toObjectInspector();
 
-		// get current thread id
-		long threadId = Thread.currentThread().getId();
+        // get current thread id
+        long threadId = Thread.currentThread().getId();
 
-		/**
-		 * expressions, expressions are thread local
-		 */
-		List<ExprNodeDesc> parameters = parameterExprs.get(threadId);
-		if (parameters == null) {
-			parameters = new ArrayList<ExprNodeDesc>();
-			for (ExprNodeDesc parameter : parametersOrigin)
-				parameters.add(parameter.clone());
-			parameterExprs.put(threadId, parameters);
-		}
+        /**
+         * expressions, expressions are thread local
+         */
+        List<ExprNodeDesc> parameters = parameterExprs.get(threadId);
+        if (parameters == null) {
+            parameters = new ArrayList<ExprNodeDesc>();
+            for (ExprNodeDesc parameter : parametersOrigin)
+                parameters.add(parameter.clone());
+            parameterExprs.put(threadId, parameters);
+        }
 
-		/**
-		 * cached parameter objects
-		 */
-		Object[] cachedParas = cachedParameters.get(threadId);
-		if (cachedParas == null) {
-			cachedParas = new Object[parameters.size()];
-			cachedParameters.put(threadId, cachedParas);
-		}
+        /**
+         * cached parameter objects
+         */
+        Object[] cachedParas = cachedParameters.get(threadId);
+        if (cachedParas == null) {
+            cachedParas = new Object[parameters.size()];
+            cachedParameters.put(threadId, cachedParas);
+        }
 
-		/**
-		 * cached row object: one per thread
-		 */
-		LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects
-				.get(threadId);
-		if (cachedRowObject == null) {
-			cachedRowObject = LazyFactory.createLazyObject(rowInspector);
-			cachedRowObjects.put(threadId, cachedRowObject);
-		}
+        /**
+         * cached row object: one per thread
+         */
+        LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects.get(threadId);
+        if (cachedRowObject == null) {
+            cachedRowObject = LazyFactory.createLazyObject(rowInspector);
+            cachedRowObjects.put(threadId, cachedRowObject);
+        }
 
-		/**
-		 * we only use lazy serde to do serialization
-		 */
-		SerDe lazySer = serDe.get(threadId);
-		if (lazySer == null) {
-			lazySer = new LazySerDe();
-			serDe.put(threadId, lazySer);
-		}
+        /**
+         * we only use lazy serde to do serialization
+         */
+        SerDe lazySer = serDe.get(threadId);
+        if (lazySer == null) {
+            lazySer = new LazySerDe();
+            serDe.put(threadId, lazySer);
+        }
 
-		/**
-		 * evaluators
-		 */
-		ExprNodeEvaluator[] evals = evaluators.get(threadId);
-		if (evals == null) {
-			evals = new ExprNodeEvaluator[parameters.size()];
-			evaluators.put(threadId, evals);
-		}
+        /**
+         * evaluators
+         */
+        ExprNodeEvaluator[] evals = evaluators.get(threadId);
+        if (evals == null) {
+            evals = new ExprNodeEvaluator[parameters.size()];
+            evaluators.put(threadId, evals);
+        }
 
-		GenericUDAFEvaluator udafPartial;
-		GenericUDAFEvaluator udafComplete;
+        GenericUDAFEvaluator udafPartial;
+        GenericUDAFEvaluator udafComplete;
 
-		// initialize object inspectors
-		try {
-			/**
-			 * evaluators, udf, object inpsectors are shared in one thread
-			 */
-			for (int i = 0; i < evals.length; i++) {
-				if (evals[i] == null) {
-					evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));
-					if (parameterInspectors[i] == null) {
-						parameterInspectors[i] = evals[i]
-								.initialize(rowInspector);
-					} else {
-						evals[i].initialize(rowInspector);
-					}
-				}
-			}
+        // initialize object inspectors
+        try {
+            /**
+             * evaluators, udf, object inspectors are shared in one thread
+             */
+            for (int i = 0; i < evals.length; i++) {
+                if (evals[i] == null) {
+                    evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));
+                    if (parameterInspectors[i] == null) {
+                        parameterInspectors[i] = evals[i].initialize(rowInspector);
+                    } else {
+                        evals[i].initialize(rowInspector);
+                    }
+                }
+            }
 
-			udafComplete = udafsComplete.get(threadId);
-			if (udafComplete == null) {
-				try {
-					udafComplete = FunctionRegistry.getGenericUDAFEvaluator(
-							genericUDAFName, types, distinct, false);
-				} catch (HiveException e) {
-					throw new AlgebricksException(e);
-				}
-				udafsComplete.put(threadId, udafComplete);
-				udafComplete.init(mode, parameterInspectors);
-			}
+            udafComplete = udafsComplete.get(threadId);
+            if (udafComplete == null) {
+                try {
+                    udafComplete = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafsComplete.put(threadId, udafComplete);
+                udafComplete.init(mode, parameterInspectors);
+            }
 
-			// multiple stage group by, determined by the mode parameter
-			if (outputInspector == null)
-				outputInspector = udafComplete.init(mode, parameterInspectors);
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspector == null)
+                outputInspector = udafComplete.init(mode, parameterInspectors);
 
-			// initial partial gby udaf
-			GenericUDAFEvaluator.Mode partialMode;
-			// adjust mode for external groupby
-			if (mode == GenericUDAFEvaluator.Mode.COMPLETE)
-				partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;
-			else if (mode == GenericUDAFEvaluator.Mode.FINAL)
-				partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;
-			else
-				partialMode = mode;
-			udafPartial = udafsPartial.get(threadId);
-			if (udafPartial == null) {
-				try {
-					udafPartial = FunctionRegistry.getGenericUDAFEvaluator(
-							genericUDAFName, types, distinct, false);
-				} catch (HiveException e) {
-					throw new AlgebricksException(e);
-				}
-				udafPartial.init(partialMode, parameterInspectors);
-				udafsPartial.put(threadId, udafPartial);
-			}
+            // initialize the partial gby udaf
+            GenericUDAFEvaluator.Mode partialMode;
+            // adjust mode for external groupby
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;
+            else if (mode == GenericUDAFEvaluator.Mode.FINAL)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+            else
+                partialMode = mode;
+            udafPartial = udafsPartial.get(threadId);
+            if (udafPartial == null) {
+                try {
+                    udafPartial = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafPartial.init(partialMode, parameterInspectors);
+                udafsPartial.put(threadId, udafPartial);
+            }
 
-			// multiple stage group by, determined by the mode parameter
-			if (outputInspectorPartial == null)
-				outputInspectorPartial = udafPartial.init(partialMode,
-						parameterInspectors);
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e);
-		}
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspectorPartial == null)
+                outputInspectorPartial = udafPartial.init(partialMode, parameterInspectors);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e);
+        }
 
-		return new AggregatuibFunctionSerializableEvaluator(parameters, types,
-				genericUDAFName, mode, distinct, rowInspector, evals,
-				parameterInspectors, cachedParas, lazySer, cachedRowObject,
-				udafPartial, udafComplete, outputInspector,
-				outputInspectorPartial);
-	}
+        return new AggregatuibFunctionSerializableEvaluator(parameters, types, genericUDAFName, mode, distinct,
+                rowInspector, evals, parameterInspectors, cachedParas, lazySer, cachedRowObject, udafPartial,
+                udafComplete, outputInspector, outputInspectorPartial);
+    }
 
-	public String toString() {
-		return "aggregation function expression evaluator factory: "
-				+ this.genericUDAFName;
-	}
+    public String toString() {
+        return "aggregation function expression evaluator factory: " + this.genericUDAFName;
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
index 68bf408..dc21be7 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
@@ -14,32 +14,28 @@
 
 public class ColumnExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private ExprNodeColumnDesc expr;
+    private ExprNodeColumnDesc expr;
 
-	private Schema inputSchema;
+    private Schema inputSchema;
 
-	public ColumnExpressionEvaluatorFactory(ILogicalExpression expression,
-			Schema schema, IVariableTypeEnvironment env)
-			throws AlgebricksException {
-		try {
-			expr = (ExprNodeColumnDesc) ExpressionTranslator.getHiveExpression(
-					expression, env);
-		} catch (Exception e) {
-			throw new AlgebricksException(e.getMessage());
-		}
-		inputSchema = schema;
-	}
+    public ColumnExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (ExprNodeColumnDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
 
-	public ICopyEvaluator createEvaluator(IDataOutputProvider output)
-			throws AlgebricksException {
-		return new ColumnExpressionEvaluator(expr,
-				inputSchema.toObjectInspector(), output);
-	}
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new ColumnExpressionEvaluator(expr, inputSchema.toObjectInspector(), output);
+    }
 
-	public String toString() {
-		return "column expression evaluator factory: " + expr.toString();
-	}
+    public String toString() {
+        return "column expression evaluator factory: " + expr.toString();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
index e0241a1..69e2171 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
@@ -12,35 +12,30 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
 import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
 
-public class ConstantExpressionEvaluatorFactory implements
-		ICopyEvaluatorFactory {
+public class ConstantExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private ExprNodeConstantDesc expr;
+    private ExprNodeConstantDesc expr;
 
-	private Schema schema;
+    private Schema schema;
 
-	public ConstantExpressionEvaluatorFactory(ILogicalExpression expression,
-			Schema inputSchema, IVariableTypeEnvironment env)
-			throws AlgebricksException {
-		try {
-			expr = (ExprNodeConstantDesc) ExpressionTranslator
-					.getHiveExpression(expression, env);
-		} catch (Exception e) {
-			throw new AlgebricksException(e.getMessage());
-		}
-		schema = inputSchema;
-	}
+    public ConstantExpressionEvaluatorFactory(ILogicalExpression expression, Schema inputSchema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeConstantDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        schema = inputSchema;
+    }
 
-	public ICopyEvaluator createEvaluator(IDataOutputProvider output)
-			throws AlgebricksException {
-		return new ConstantExpressionEvaluator(expr,
-				schema.toObjectInspector(), output);
-	}
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new ConstantExpressionEvaluator(expr, schema.toObjectInspector(), output);
+    }
 
-	public String toString() {
-		return "constant expression evaluator factory: " + expr.toString();
-	}
+    public String toString() {
+        return "constant expression evaluator factory: " + expr.toString();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
index 4b5f906..eddfb9b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
@@ -13,32 +13,28 @@
 import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
 
 public class FieldExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private ExprNodeFieldDesc expr;
+    private ExprNodeFieldDesc expr;
 
-	private Schema inputSchema;
+    private Schema inputSchema;
 
-	public FieldExpressionEvaluatorFactory(ILogicalExpression expression,
-			Schema schema, IVariableTypeEnvironment env)
-			throws AlgebricksException {
-		try {
-			expr = (ExprNodeFieldDesc) ExpressionTranslator.getHiveExpression(
-					expression, env);
-		} catch (Exception e) {
-			throw new AlgebricksException(e.getMessage());
-		}
-		inputSchema = schema;
-	}
+    public FieldExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (ExprNodeFieldDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
 
-	public ICopyEvaluator createEvaluator(IDataOutputProvider output)
-			throws AlgebricksException {
-		return new FieldExpressionEvaluator(expr,
-				inputSchema.toObjectInspector(), output);
-	}
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new FieldExpressionEvaluator(expr, inputSchema.toObjectInspector(), output);
+    }
 
-	public String toString() {
-		return "field access expression evaluator factory: " + expr.toString();
-	}
+    public String toString() {
+        return "field access expression evaluator factory: " + expr.toString();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
index 387ca72..842d96e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
@@ -34,159 +34,134 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
 import edu.uci.ics.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory;
 
-public class HiveExpressionRuntimeProvider implements
-		IExpressionRuntimeProvider {
+public class HiveExpressionRuntimeProvider implements IExpressionRuntimeProvider {
 
-	public static final IExpressionRuntimeProvider INSTANCE = new HiveExpressionRuntimeProvider();
+    public static final IExpressionRuntimeProvider INSTANCE = new HiveExpressionRuntimeProvider();
 
-	@Override
-	public IAggregateEvaluatorFactory createAggregateFunctionFactory(
-			AggregateFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new AggregateFunctionFactoryAdapter(
-				new AggregationFunctionFactory(expr, schema, env));
-	}
+    @Override
+    public IAggregateEvaluatorFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new AggregateFunctionFactoryAdapter(new AggregationFunctionFactory(expr, schema, env));
+    }
 
-	@Override
-	public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
-			AggregateFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new AggregationFunctionSerializableFactory(expr, schema, env);
-	}
+    @Override
+    public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
+            AggregateFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
+            JobGenContext context) throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new AggregationFunctionSerializableFactory(expr, schema, env);
+    }
 
-	@Override
-	public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(
-			StatefulFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		return null;
-	}
+    @Override
+    public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(StatefulFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        return null;
+    }
 
-	@Override
-	public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(
-			UnnestingFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new UnnestingFunctionFactoryAdapter(
-				new UnnestingFunctionFactory(expr, schema, env));
-	}
+    @Override
+    public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new UnnestingFunctionFactoryAdapter(new UnnestingFunctionFactory(expr, schema, env));
+    }
 
-	public IScalarEvaluatorFactory createEvaluatorFactory(
-			ILogicalExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		switch (expr.getExpressionTag()) {
-		case VARIABLE: {
-			VariableReferenceExpression v = (VariableReferenceExpression) expr;
-			return new ScalarEvaluatorFactoryAdapter(
-					createVariableEvaluatorFactory(v, env, inputSchemas,
-							context));
-		}
-		case CONSTANT: {
-			ConstantExpression c = (ConstantExpression) expr;
-			return new ScalarEvaluatorFactoryAdapter(
-					createConstantEvaluatorFactory(c, env, inputSchemas,
-							context));
-		}
-		case FUNCTION_CALL: {
-			AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;
-			FunctionIdentifier fid = fun.getFunctionIdentifier();
+    public IScalarEvaluatorFactory createEvaluatorFactory(ILogicalExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        switch (expr.getExpressionTag()) {
+            case VARIABLE: {
+                VariableReferenceExpression v = (VariableReferenceExpression) expr;
+                return new ScalarEvaluatorFactoryAdapter(createVariableEvaluatorFactory(v, env, inputSchemas, context));
+            }
+            case CONSTANT: {
+                ConstantExpression c = (ConstantExpression) expr;
+                return new ScalarEvaluatorFactoryAdapter(createConstantEvaluatorFactory(c, env, inputSchemas, context));
+            }
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;
+                FunctionIdentifier fid = fun.getFunctionIdentifier();
 
-			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {
-				return new ScalarEvaluatorFactoryAdapter(
-						createFieldExpressionEvaluatorFactory(fun, env,
-								inputSchemas, context));
-			}
+                if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {
+                    return new ScalarEvaluatorFactoryAdapter(createFieldExpressionEvaluatorFactory(fun, env,
+                            inputSchemas, context));
+                }
 
-			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {
-				return new ScalarEvaluatorFactoryAdapter(
-						createNullExpressionEvaluatorFactory(fun, env,
-								inputSchemas, context));
-			}
+                if (fid.getName().equals(ExpressionConstant.NULL)) {
+                    return new ScalarEvaluatorFactoryAdapter(createNullExpressionEvaluatorFactory(fun, env,
+                            inputSchemas, context));
+                }
 
-			if (fun.getKind() == FunctionKind.SCALAR) {
-				ScalarFunctionCallExpression scalar = (ScalarFunctionCallExpression) fun;
-				return new ScalarEvaluatorFactoryAdapter(
-						createScalarFunctionEvaluatorFactory(scalar, env,
-								inputSchemas, context));
-			} else {
-				throw new AlgebricksException(
-						"Cannot create evaluator for function " + fun
-								+ " of kind " + fun.getKind());
-			}
-		}
-		default: {
-			throw new IllegalStateException();
-		}
-		}
-	}
+                if (fun.getKind() == FunctionKind.SCALAR) {
+                    ScalarFunctionCallExpression scalar = (ScalarFunctionCallExpression) fun;
+                    return new ScalarEvaluatorFactoryAdapter(createScalarFunctionEvaluatorFactory(scalar, env,
+                            inputSchemas, context));
+                } else {
+                    throw new AlgebricksException("Cannot create evaluator for function " + fun + " of kind "
+                            + fun.getKind());
+                }
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
 
-	private ICopyEvaluatorFactory createVariableEvaluatorFactory(
-			VariableReferenceExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new ColumnExpressionEvaluatorFactory(expr, schema, env);
-	}
+    private ICopyEvaluatorFactory createVariableEvaluatorFactory(VariableReferenceExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new ColumnExpressionEvaluatorFactory(expr, schema, env);
+    }
 
-	private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(
-			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		List<String> names = new ArrayList<String>();
-		List<TypeInfo> types = new ArrayList<TypeInfo>();
-		for (IOperatorSchema inputSchema : inputSchemas) {
-			Schema schema = this.getSchema(inputSchema, env);
-			names.addAll(schema.getNames());
-			types.addAll(schema.getTypes());
-		}
-		Schema inputSchema = new Schema(names, types);
-		return new ScalarFunctionExpressionEvaluatorFactory(expr, inputSchema,
-				env);
-	}
+    private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        List<String> names = new ArrayList<String>();
+        List<TypeInfo> types = new ArrayList<TypeInfo>();
+        for (IOperatorSchema inputSchema : inputSchemas) {
+            Schema schema = this.getSchema(inputSchema, env);
+            names.addAll(schema.getNames());
+            types.addAll(schema.getTypes());
+        }
+        Schema inputSchema = new Schema(names, types);
+        return new ScalarFunctionExpressionEvaluatorFactory(expr, inputSchema, env);
+    }
 
-	private ICopyEvaluatorFactory createFieldExpressionEvaluatorFactory(
-			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new FieldExpressionEvaluatorFactory(expr, schema, env);
-	}
+    private ICopyEvaluatorFactory createFieldExpressionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new FieldExpressionEvaluatorFactory(expr, schema, env);
+    }
 
-	private ICopyEvaluatorFactory createNullExpressionEvaluatorFactory(
-			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new NullExpressionEvaluatorFactory(expr, schema, env);
-	}
+    private ICopyEvaluatorFactory createNullExpressionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new NullExpressionEvaluatorFactory(expr, schema, env);
+    }
 
-	private ICopyEvaluatorFactory createConstantEvaluatorFactory(
-			ConstantExpression expr, IVariableTypeEnvironment env,
-			IOperatorSchema[] inputSchemas, JobGenContext context)
-			throws AlgebricksException {
-		Schema schema = this.getSchema(inputSchemas[0], env);
-		return new ConstantExpressionEvaluatorFactory(expr, schema, env);
-	}
+    private ICopyEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new ConstantExpressionEvaluatorFactory(expr, schema, env);
+    }
 
-	private Schema getSchema(IOperatorSchema inputSchema,
-			IVariableTypeEnvironment env) throws AlgebricksException {
-		List<String> names = new ArrayList<String>();
-		List<TypeInfo> types = new ArrayList<TypeInfo>();
-		Iterator<LogicalVariable> variables = inputSchema.iterator();
-		while (variables.hasNext()) {
-			LogicalVariable var = variables.next();
-			names.add(var.toString());
-			types.add((TypeInfo) env.getVarType(var));
-		}
+    private Schema getSchema(IOperatorSchema inputSchema, IVariableTypeEnvironment env) throws AlgebricksException {
+        List<String> names = new ArrayList<String>();
+        List<TypeInfo> types = new ArrayList<TypeInfo>();
+        Iterator<LogicalVariable> variables = inputSchema.iterator();
+        while (variables.hasNext()) {
+            LogicalVariable var = variables.next();
+            names.add(var.toString());
+            types.add((TypeInfo) env.getVarType(var));
+        }
 
-		Schema schema = new Schema(names, types);
-		return schema;
-	}
+        Schema schema = new Schema(names, types);
+        return schema;
+    }
 
 }
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
index 8f516e8..075ed91 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
@@ -14,32 +14,28 @@
 
 public class NullExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private ExprNodeNullDesc expr;
+    private ExprNodeNullDesc expr;
 
-	private Schema schema;
+    private Schema schema;
 
-	public NullExpressionEvaluatorFactory(ILogicalExpression expression,
-			Schema intputSchema, IVariableTypeEnvironment env)
-			throws AlgebricksException {
-		try {
-			expr = (ExprNodeNullDesc) ExpressionTranslator.getHiveExpression(
-					expression, env);
-		} catch (Exception e) {
-			throw new AlgebricksException(e.getMessage());
-		}
-		schema = intputSchema;
-	}
+    public NullExpressionEvaluatorFactory(ILogicalExpression expression, Schema intputSchema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeNullDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        schema = intputSchema;
+    }
 
-	public ICopyEvaluator createEvaluator(IDataOutputProvider output)
-			throws AlgebricksException {
-		return new NullExpressionEvaluator(expr, schema.toObjectInspector(),
-				output);
-	}
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new NullExpressionEvaluator(expr, schema.toObjectInspector(), output);
+    }
 
-	public String toString() {
-		return "null expression evaluator factory: " + expr.toString();
-	}
+    public String toString() {
+        return "null expression evaluator factory: " + expr.toString();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
index 262758e..9e10c6b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
@@ -14,64 +14,56 @@
 import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
 import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
 
-public class ScalarFunctionExpressionEvaluatorFactory implements
-		ICopyEvaluatorFactory {
+public class ScalarFunctionExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private transient ExprNodeGenericFuncDesc expr;
+    private transient ExprNodeGenericFuncDesc expr;
 
-	private String exprSerialization;
+    private String exprSerialization;
 
-	private Schema inputSchema;
+    private Schema inputSchema;
 
-	private transient Configuration config;
+    private transient Configuration config;
 
-	public ScalarFunctionExpressionEvaluatorFactory(
-			ILogicalExpression expression, Schema schema,
-			IVariableTypeEnvironment env) throws AlgebricksException {
-		try {
-			expr = (ExprNodeGenericFuncDesc) ExpressionTranslator
-					.getHiveExpression(expression, env);
+    public ScalarFunctionExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeGenericFuncDesc) ExpressionTranslator.getHiveExpression(expression, env);
 
-			exprSerialization = Utilities.serializeExpression(expr);
+            exprSerialization = Utilities.serializeExpression(expr);
 
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new AlgebricksException(e.getMessage());
-		}
-		inputSchema = schema;
-	}
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
 
-	public synchronized ICopyEvaluator createEvaluator(
-			IDataOutputProvider output) throws AlgebricksException {
-		if (expr == null) {
-			configClassLoader();
-			expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(
-					exprSerialization, config);
-		}
+    public synchronized ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        if (expr == null) {
+            configClassLoader();
+            expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(exprSerialization, config);
+        }
 
-		ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr
-				.clone();
-		return new FunctionExpressionEvaluator(funcDesc,
-				inputSchema.toObjectInspector(), output);
-	}
+        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr.clone();
+        return new FunctionExpressionEvaluator(funcDesc, inputSchema.toObjectInspector(), output);
+    }
 
-	private void configClassLoader() {
-		config = new Configuration();
-		ClassLoader loader = this.getClass().getClassLoader();
-		config.setClassLoader(loader);
-		Thread.currentThread().setContextClassLoader(loader);
-	}
+    private void configClassLoader() {
+        config = new Configuration();
+        ClassLoader loader = this.getClass().getClassLoader();
+        config.setClassLoader(loader);
+        Thread.currentThread().setContextClassLoader(loader);
+    }
 
-	public String toString() {
-		if (expr == null) {
-			configClassLoader();
-			expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(
-					exprSerialization, new Configuration());
-		}
+    public String toString() {
+        if (expr == null) {
+            configClassLoader();
+            expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(exprSerialization, new Configuration());
+        }
 
-		return "function expression evaluator factory: " + expr.getExprString();
-	}
+        return "function expression evaluator factory: " + expr.getExprString();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
index 1d77737..4657ae1 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
@@ -14,31 +14,27 @@
 
 public class UnnestingFunctionFactory implements ICopyUnnestingFunctionFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private UDTFDesc expr;
+    private UDTFDesc expr;
 
-	private Schema inputSchema;
+    private Schema inputSchema;
 
-	private int[] columns;
+    private int[] columns;
 
-	public UnnestingFunctionFactory(ILogicalExpression expression,
-			Schema schema, IVariableTypeEnvironment env)
-			throws AlgebricksException {
-		try {
-			expr = (UDTFDesc) ExpressionTranslator.getHiveExpression(
-					expression, env);
-		} catch (Exception e) {
-			throw new AlgebricksException(e.getMessage());
-		}
-		inputSchema = schema;
-	}
+    public UnnestingFunctionFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (UDTFDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
 
-	@Override
-	public ICopyUnnestingFunction createUnnestingFunction(
-			IDataOutputProvider provider) throws AlgebricksException {
-		return new UDTFFunctionEvaluator(expr, inputSchema, columns,
-				provider.getDataOutput());
-	}
+    @Override
+    public ICopyUnnestingFunction createUnnestingFunction(IDataOutputProvider provider) throws AlgebricksException {
+        return new UDTFFunctionEvaluator(expr, inputSchema, columns, provider.getDataOutput());
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
index fc302e1..b636009 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
@@ -4,28 +4,26 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveDoubleBinaryHashFunctionFactory implements
-		IBinaryHashFunctionFactory {
-	private static final long serialVersionUID = 1L;
+public class HiveDoubleBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
 
-	public static HiveDoubleBinaryHashFunctionFactory INSTANCE = new HiveDoubleBinaryHashFunctionFactory();
+    public static HiveDoubleBinaryHashFunctionFactory INSTANCE = new HiveDoubleBinaryHashFunctionFactory();
 
-	private HiveDoubleBinaryHashFunctionFactory() {
-	}
+    private HiveDoubleBinaryHashFunctionFactory() {
+    }
 
-	@Override
-	public IBinaryHashFunction createBinaryHashFunction() {
-		// TODO Auto-generated method stub
-		return new IBinaryHashFunction() {
-			private Double value;
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+        // TODO Auto-generated method stub
+        return new IBinaryHashFunction() {
+            private Double value;
 
-			@Override
-			public int hash(byte[] bytes, int offset, int length) {
-				value = Double.longBitsToDouble(LazyUtils.byteArrayToLong(
-						bytes, offset));
-				return value.hashCode();
-			}
-		};
-	}
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                value = Double.longBitsToDouble(LazyUtils.byteArrayToLong(bytes, offset));
+                return value.hashCode();
+            }
+        };
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
index e1a9994..90e6ce4 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
@@ -5,31 +5,29 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveIntegerBinaryHashFunctionFactory implements
-		IBinaryHashFunctionFactory {
-	private static final long serialVersionUID = 1L;
+public class HiveIntegerBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
 
-	public static IBinaryHashFunctionFactory INSTANCE = new HiveIntegerBinaryHashFunctionFactory();
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveIntegerBinaryHashFunctionFactory();
 
-	private HiveIntegerBinaryHashFunctionFactory() {
-	}
+    private HiveIntegerBinaryHashFunctionFactory() {
+    }
 
-	@Override
-	public IBinaryHashFunction createBinaryHashFunction() {
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
 
-		return new IBinaryHashFunction() {
-			private VInt value = new VInt();
+        return new IBinaryHashFunction() {
+            private VInt value = new VInt();
 
-			@Override
-			public int hash(byte[] bytes, int offset, int length) {
-				LazyUtils.readVInt(bytes, offset, value);
-				if (value.length != length)
-					throw new IllegalArgumentException(
-							"length mismatch in int hash function actual: "
-									+ length + " expected " + value.length);
-				return value.value;
-			}
-		};
-	}
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVInt(bytes, offset, value);
+                if (value.length != length)
+                    throw new IllegalArgumentException("length mismatch in int hash function actual: " + length
+                            + " expected " + value.length);
+                return value.value;
+            }
+        };
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
index 6f7c6f2..1b61f67 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
@@ -5,27 +5,26 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveLongBinaryHashFunctionFactory implements
-		IBinaryHashFunctionFactory {
-	private static final long serialVersionUID = 1L;
+public class HiveLongBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
 
-	public static IBinaryHashFunctionFactory INSTANCE = new HiveLongBinaryHashFunctionFactory();
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveLongBinaryHashFunctionFactory();
 
-	private HiveLongBinaryHashFunctionFactory() {
-	}
+    private HiveLongBinaryHashFunctionFactory() {
+    }
 
-	@Override
-	public IBinaryHashFunction createBinaryHashFunction() {
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
 
-		return new IBinaryHashFunction() {
-			private VLong value = new VLong();
+        return new IBinaryHashFunction() {
+            private VLong value = new VLong();
 
-			@Override
-			public int hash(byte[] bytes, int offset, int length) {
-				LazyUtils.readVLong(bytes, offset, value);
-				return (int) value.value;
-			}
-		};
-	}
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVLong(bytes, offset, value);
+                return (int) value.value;
+            }
+        };
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
index e03dde0..f2b7b44 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
@@ -3,30 +3,29 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveRawBinaryHashFunctionFactory implements
-		IBinaryHashFunctionFactory {
-	private static final long serialVersionUID = 1L;
+public class HiveRawBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
 
-	public static IBinaryHashFunctionFactory INSTANCE = new HiveRawBinaryHashFunctionFactory();
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveRawBinaryHashFunctionFactory();
 
-	private HiveRawBinaryHashFunctionFactory() {
+    private HiveRawBinaryHashFunctionFactory() {
 
-	}
+    }
 
-	@Override
-	public IBinaryHashFunction createBinaryHashFunction() {
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
 
-		return new IBinaryHashFunction() {
+        return new IBinaryHashFunction() {
 
-			@Override
-			public int hash(byte[] bytes, int offset, int length) {
-				int value = 1;
-				int end = offset + length;
-				for (int i = offset; i < end; i++)
-					value = value * 31 + (int) bytes[i];
-				return value;
-			}
-		};
-	}
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++)
+                    value = value * 31 + (int) bytes[i];
+                return value;
+            }
+        };
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
index 055c077..a9cf6fd 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
@@ -5,41 +5,37 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveStingBinaryHashFunctionFactory implements
-		IBinaryHashFunctionFactory {
-	private static final long serialVersionUID = 1L;
+public class HiveStingBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
 
-	public static HiveStingBinaryHashFunctionFactory INSTANCE = new HiveStingBinaryHashFunctionFactory();
+    public static HiveStingBinaryHashFunctionFactory INSTANCE = new HiveStingBinaryHashFunctionFactory();
 
-	private HiveStingBinaryHashFunctionFactory() {
-	}
+    private HiveStingBinaryHashFunctionFactory() {
+    }
 
-	@Override
-	public IBinaryHashFunction createBinaryHashFunction() {
-		// TODO Auto-generated method stub
-		return new IBinaryHashFunction() {
-			private VInt len = new VInt();
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+        // TODO Auto-generated method stub
+        return new IBinaryHashFunction() {
+            private VInt len = new VInt();
 
-			@Override
-			public int hash(byte[] bytes, int offset, int length) {
-				LazyUtils.readVInt(bytes, offset, len);
-				if (len.value + len.length != length)
-					throw new IllegalStateException(
-							"parse string: length mismatch, expected "
-									+ (len.value + len.length) + " but get "
-									+ length);
-				return hashBytes(bytes, offset + len.length, length
-						- len.length);
-			}
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVInt(bytes, offset, len);
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                return hashBytes(bytes, offset + len.length, length - len.length);

+            }

 

-			public int hashBytes(byte[] bytes, int offset, int length) {

-				int value = 1;

-				int end = offset + length;

-				for (int i = offset; i < end; i++)

-					value = value * 31 + (int) bytes[i];

-				return value;

-			}

-		};

-	}

+            public int hashBytes(byte[] bytes, int offset, int length) {

+                int value = 1;

+                int end = offset + length;

+                for (int i = offset; i < end; i++)

+                    value = value * 31 + (int) bytes[i];

+                return value;

+            }

+        };

+    }

 

 }
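
HiveStingBinaryHashFunctionFactory ("Sting" is the file's original spelling) hashes Hive's length-prefixed string layout: a variable-width VInt header carries the payload size, the guard rejects buffers whose declared and actual sizes disagree, and only the payload after the header feeds the same 31-multiplier hash sketched earlier. Schematically:

    [ VInt header: len.length bytes, value = payload size ][ payload: len.value bytes ]
    valid iff len.length + len.value == length
    hash  ==  rollingHash(bytes, offset + len.length, length - len.length)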

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
index 5f03962..6ac012f 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
@@ -4,22 +4,21 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveDoubleAscNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveDoubleAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
 
-		return new INormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				int header = LazyUtils.byteArrayToInt(bytes, start);
-				long unsignedValue = (long) header;
-				return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
-			}
-		};
-	}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int header = LazyUtils.byteArrayToInt(bytes, start);
+                long unsignedValue = (long) header;
+                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+            }
+        };
+    }
 }
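
The double normalizer reads the first four bytes of the serialized value as a signed int header and rebases it by subtracting Integer.MIN_VALUE, so that unsigned comparison of the 32-bit keys matches signed comparison of the headers. A sketch of the rebasing with a few checkpoints:

    static int toUnsignedOrder(int signedHeader) {
        long u = (long) signedHeader;
        return (int) ((u - (long) Integer.MIN_VALUE) & 0xffffffffL);
    }
    // Integer.MIN_VALUE -> 0x00000000, -1 -> 0x7fffffff,
    // 0 -> 0x80000000, Integer.MAX_VALUE -> 0xffffffff

One caveat: on raw IEEE-754 bits this orders every negative double before every non-negative one but leaves pairs of negatives in reversed relative order; whether that matters depends on how the surrounding sorter weighs normalized keys against the full comparator.
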
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
index e4587a2..3044109 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
@@ -3,24 +3,22 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveDoubleDescNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveDoubleDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
-	private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveDoubleAscNormalizedKeyComputerFactory();
+    private static final long serialVersionUID = 1L;
+    private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveDoubleAscNormalizedKeyComputerFactory();
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
-		return new INormalizedKeyComputer() {
-			private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory
-					.createNormalizedKeyComputer();
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				int nk = nmkComputer.normalize(bytes, start, length);
-				return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
-			}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int nk = nmkComputer.normalize(bytes, start, length);
+                return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+            }
 
-		};
-	}
+        };
+    }
 }
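
The descending variant wraps the ascending computer and reflects its key across the 32-bit key space; in unsigned arithmetic the expression is equivalent to 0xffffffff - nk, so the order of any two keys flips:

    static int descKey(int nk) {
        // the long promotion keeps the subtraction exact before truncating to int
        return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
    }
    // nk = 0x00000000 -> 0xffffffff ; nk = 0xffffffff -> 0x00000000
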
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
index 2ff390a..a1d4d48 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
@@ -5,27 +5,25 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveIntegerAscNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveIntegerAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
 
-		return new INormalizedKeyComputer() {
-			private VInt vint = new VInt();
+        return new INormalizedKeyComputer() {
+            private VInt vint = new VInt();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				LazyUtils.readVInt(bytes, start, vint);
-				if (vint.length != length)
-					throw new IllegalArgumentException(
-							"length mismatch in int comparator function actual: "
-									+ vint.length + " expected " + length);
-				long unsignedValue = (long) vint.value;
-				return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
-			}
-		};
-	}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, vint);
+                if (vint.length != length)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + vint.length + " expected " + length);
+                long unsignedValue = (long) vint.value;
+                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+            }
+        };
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
index 8eff1f8..b8a30a8 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
@@ -5,27 +5,25 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveIntegerDescNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveIntegerDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
 
-		return new INormalizedKeyComputer() {
-			private VInt vint = new VInt();
+        return new INormalizedKeyComputer() {
+            private VInt vint = new VInt();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				LazyUtils.readVInt(bytes, start, vint);
-				if (vint.length != length)
-					throw new IllegalArgumentException(
-							"length mismatch in int comparator function actual: "
-									+ vint.length + " expected " + length);
-				long unsignedValue = (long) vint.value;
-				return (int) ((long) 0xffffffff - unsignedValue);
-			}
-		};
-	}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, vint);
+                if (vint.length != length)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + vint.length + " expected " + length);
+                long unsignedValue = (long) vint.value;
+                return (int) ((long) 0xffffffff - unsignedValue);
+            }
+        };
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
index 768eec2..a893d19 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
@@ -5,61 +5,59 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveLongAscNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveLongAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
 
-		return new INormalizedKeyComputer() {
-			private static final int POSTIVE_LONG_MASK = (3 << 30);
-			private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
-			private static final int NEGATIVE_LONG_MASK = (0 << 30);
-			private VLong vlong = new VLong();
+        return new INormalizedKeyComputer() {
+            private static final int POSTIVE_LONG_MASK = (3 << 30);
+            private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+            private static final int NEGATIVE_LONG_MASK = (0 << 30);
+            private VLong vlong = new VLong();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				LazyUtils.readVLong(bytes, start, vlong);
-				if (vlong.length != length)
-					throw new IllegalArgumentException(
-							"length mismatch in int comparator function actual: "
-									+ vlong.length + " expected " + length);
-				long value = (long) vlong.value;
-				int highValue = (int) (value >> 32);
-				if (highValue > 0) {
-					/**
-					 * larger than Integer.MAX
-					 */
-					int highNmk = getKey(highValue);
-					highNmk >>= 2;
-					highNmk |= POSTIVE_LONG_MASK;
-					return highNmk;
-				} else if (highValue == 0) {
-					/**
-					 * smaller than Integer.MAX but >=0
-					 */
-					int lowNmk = (int) value;
-					lowNmk >>= 2;
-					lowNmk |= NON_NEGATIVE_INT_MASK;
-					return lowNmk;
-				} else {
-					/**
-					 * less than 0; TODO: have not optimized for that
-					 */
-					int highNmk = getKey(highValue);
-					highNmk >>= 2;
-					highNmk |= NEGATIVE_LONG_MASK;
-					return highNmk;
-				}
-			}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVLong(bytes, start, vlong);
+                if (vlong.length != length)
+                    throw new IllegalArgumentException("length mismatch in long comparator function actual: "
+                            + vlong.length + " expected " + length);
+                long value = (long) vlong.value;
+                int highValue = (int) (value >> 32);
+                if (highValue > 0) {
+                    /**
+                     * larger than Integer.MAX
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= POSTIVE_LONG_MASK;
+                    return highNmk;
+                } else if (highValue == 0) {
+                    /**
+                     * smaller than Integer.MAX but >=0
+                     */
+                    int lowNmk = (int) value;
+                    lowNmk >>= 2;
+                    lowNmk |= NON_NEGATIVE_INT_MASK;
+                    return lowNmk;
+                } else {
+                    /**
+                     * less than 0; TODO: have not optimized for that
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= NEGATIVE_LONG_MASK;
+                    return highNmk;
+                }
+            }
 
-			private int getKey(int value) {
-				long unsignedFirstValue = (long) value;
-				int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
-				return nmk;
-			}
-		};
-	}
+            private int getKey(int value) {
+                long unsignedFirstValue = (long) value;
+                int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+                return nmk;
+            }
+        };
+    }
 }
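
A 64-bit long cannot fit in a 32-bit key, so this computer classifies the value by its high word and spends the top two key bits on the class, packing 30 rebased high-order bits underneath (POSTIVE_LONG_MASK is the original spelling). The layout, as read from the code:

    bits 31..30 : 11 = high word > 0 (beyond int range)
                  10 = fits in a non-negative int
                  00 = negative
    bits 29..0  : getKey(high or low word) >> 2

Because the class prefixes order as 11 > 10 > 00, cross-class comparisons resolve on the prefix alone; only same-class values depend on the packed bits.
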
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
index 20ae56a..cc5661b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
@@ -3,25 +3,23 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveLongDescNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveLongDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
-	private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveIntegerAscNormalizedKeyComputerFactory();
+    private static final long serialVersionUID = 1L;
+    private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveLongAscNormalizedKeyComputerFactory();
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
-		return new INormalizedKeyComputer() {
-			private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory
-					.createNormalizedKeyComputer();
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				int nk = nmkComputer.normalize(bytes, start, length);
-				return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
-			}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int nk = nmkComputer.normalize(bytes, start, length);
+                return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+            }
 
-		};
-	}
+        };
+    }
 
 }
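
Both descending factories use the same wrap-and-complement pattern, so the delegate must parse the same physical layout as the key it inverts: for longs that is HiveLongAscNormalizedKeyComputerFactory, whose VLong header read matches long-encoded fields (the VInt-based integer computer would throw on them); instantiating the integer factory here, as the pre-format source did, reads as a copy-paste slip.
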
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
index b16ccba..d0429d6 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
@@ -6,39 +6,35 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 
-public class HiveStringAscNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveStringAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
 
-		return new INormalizedKeyComputer() {
-			private VInt len = new VInt();
+        return new INormalizedKeyComputer() {
+            private VInt len = new VInt();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				LazyUtils.readVInt(bytes, start, len);
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, len);
 
-				if (len.value + len.length != length)
-					throw new IllegalStateException(
-							"parse string: length mismatch, expected "
-									+ (len.value + len.length) + " but get "
-									+ length);
-				int nk = 0;
-				int offset = start + len.length;
-				for (int i = 0; i < 2; ++i) {
-					nk <<= 16;
-					if (i < len.value) {
-						char character = UTF8StringPointable.charAt(bytes,
-								offset);
-						nk += ((int) character) & 0xffff;
-						offset += UTF8StringPointable.charSize(bytes, offset);
-					}
-				}
-				return nk;
-			}
-		};
-	}
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                int nk = 0;
+                int offset = start + len.length;
+                for (int i = 0; i < 2; ++i) {
+                    nk <<= 16;
+                    if (i < len.value) {
+                        char character = UTF8StringPointable.charAt(bytes, offset);
+                        nk += ((int) character) & 0xffff;
+                        offset += UTF8StringPointable.charSize(bytes, offset);
+                    }
+                }
+                return nk;
+            }
+        };
+    }
 }
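
The string normalizer packs the first two UTF-8 code units after the VInt header into the 32-bit key, zero-padding strings shorter than two units. A compact equivalent of the loop (charCount stands in for the len.value guard; | and the original += coincide because the two halves never overlap):

    static int stringPrefixKey(char c0, char c1, int charCount) {
        int nk = 0;
        nk = (nk << 16) | (charCount > 0 ? (c0 & 0xffff) : 0);
        nk = (nk << 16) | (charCount > 1 ? (c1 & 0xffff) : 0);
        return nk;
    }
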
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
index e8978c6..15b2d27 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
@@ -6,35 +6,32 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 
-public class HiveStringDescNormalizedKeyComputerFactory implements
-		INormalizedKeyComputerFactory {
+public class HiveStringDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	@Override
-	public INormalizedKeyComputer createNormalizedKeyComputer() {
-		return new INormalizedKeyComputer() {
-			private VInt len = new VInt();
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private VInt len = new VInt();
 
-			@Override
-			public int normalize(byte[] bytes, int start, int length) {
-				LazyUtils.readVInt(bytes, start, len);
-				if (len.value + len.length != length)
-					throw new IllegalStateException(
-							"parse string: length mismatch, expected "
-									+ (len.value + len.length) + " but get "
-									+ length);
-				int nk = 0;
-				int offset = start + len.length;
-				for (int i = 0; i < 2; ++i) {
-					nk <<= 16;
-					if (i < len.value) {
-						nk += ((int) UTF8StringPointable.charAt(bytes, offset)) & 0xffff;
-						offset += UTF8StringPointable.charSize(bytes, offset);
-					}
-				}
-				return (int) ((long) 0xffffffff - (long) nk);
-			}
-		};
-	}
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, len);
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                int nk = 0;
+                int offset = start + len.length;
+                for (int i = 0; i < 2; ++i) {
+                    nk <<= 16;
+                    if (i < len.value) {
+                        nk += ((int) UTF8StringPointable.charAt(bytes, offset)) & 0xffff;
+                        offset += UTF8StringPointable.charSize(bytes, offset);
+                    }
+                }
+                return (int) ((long) 0xffffffff - (long) nk);
+            }
+        };
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
index 91d08c6..590bd61 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
@@ -8,21 +8,21 @@
 
 public class HiveNullWriterFactory implements INullWriterFactory {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	public static HiveNullWriterFactory INSTANCE = new HiveNullWriterFactory();
+    public static HiveNullWriterFactory INSTANCE = new HiveNullWriterFactory();
 
-	@Override
-	public INullWriter createNullWriter() {
-		return new HiveNullWriter();
-	}
+    @Override
+    public INullWriter createNullWriter() {
+        return new HiveNullWriter();
+    }
 }
 
 class HiveNullWriter implements INullWriter {
 
-	@Override
-	public void writeNull(DataOutput out) throws HyracksDataException {
-		// do nothing
-	}
+    @Override
+    public void writeNull(DataOutput out) throws HyracksDataException {
+        // do nothing
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
index 3d2b141..677e20e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
@@ -4,17 +4,16 @@
 

 public class HiveBinaryBooleanInspector implements IBinaryBooleanInspector {

 

-	HiveBinaryBooleanInspector() {

-	}

+    HiveBinaryBooleanInspector() {

+    }

 

-	@Override

-	public boolean getBooleanValue(byte[] bytes, int offset, int length) {

-		if (length == 0)

-			return false;

-		if (length != 1)

-			throw new IllegalStateException("boolean field error: with length "

-					+ length);

-		return bytes[0] == 1;

-	}

+    @Override

+    public boolean getBooleanValue(byte[] bytes, int offset, int length) {

+        if (length == 0)

+            return false;

+        if (length != 1)

+            throw new IllegalStateException("boolean field error: with length " + length);

+        return bytes[offset] == 1;

+    }

 

 }
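
The boolean inspector treats a zero-length field as false, requires exactly one byte otherwise, and reads the byte at the field's offset, so a boolean embedded mid-frame is taken from the right position rather than from the start of the buffer.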

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
index 86afbee..22a6065 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
@@ -4,19 +4,17 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
-public class HiveBinaryBooleanInspectorFactory implements
-		IBinaryBooleanInspectorFactory {
-	private static final long serialVersionUID = 1L;
-	public static HiveBinaryBooleanInspectorFactory INSTANCE = new HiveBinaryBooleanInspectorFactory();
+public class HiveBinaryBooleanInspectorFactory implements IBinaryBooleanInspectorFactory {
+    private static final long serialVersionUID = 1L;
+    public static HiveBinaryBooleanInspectorFactory INSTANCE = new HiveBinaryBooleanInspectorFactory();
 
-	private HiveBinaryBooleanInspectorFactory() {
+    private HiveBinaryBooleanInspectorFactory() {
 
-	}
+    }
 
-	@Override
-	public IBinaryBooleanInspector createBinaryBooleanInspector(
-			IHyracksTaskContext arg0) {
-		return new HiveBinaryBooleanInspector();
-	}
+    @Override
+    public IBinaryBooleanInspector createBinaryBooleanInspector(IHyracksTaskContext arg0) {
+        return new HiveBinaryBooleanInspector();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
index e82e501..555afee 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
@@ -5,19 +5,18 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;

 

 public class HiveBinaryIntegerInspector implements IBinaryIntegerInspector {

-	private VInt value = new VInt();

+    private VInt value = new VInt();

 

-	HiveBinaryIntegerInspector() {

-	}

+    HiveBinaryIntegerInspector() {

+    }

 

-	@Override

-	public int getIntegerValue(byte[] bytes, int offset, int length) {

-		LazyUtils.readVInt(bytes, offset, value);

-		if (value.length != length)

-			throw new IllegalArgumentException(

-					"length mismatch in int hash function actual: " + length

-							+ " expected " + value.length);

-		return value.value;

-	}

+    @Override

+    public int getIntegerValue(byte[] bytes, int offset, int length) {

+        LazyUtils.readVInt(bytes, offset, value);

+        if (value.length != length)

+            throw new IllegalArgumentException("length mismatch in int inspector actual: " + length + " expected "

+                    + value.length);

+        return value.value;

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
index b44e610..bb93a60 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
@@ -4,19 +4,17 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
-public class HiveBinaryIntegerInspectorFactory implements
-		IBinaryIntegerInspectorFactory {
-	private static final long serialVersionUID = 1L;
-	public static HiveBinaryIntegerInspectorFactory INSTANCE = new HiveBinaryIntegerInspectorFactory();
+public class HiveBinaryIntegerInspectorFactory implements IBinaryIntegerInspectorFactory {
+    private static final long serialVersionUID = 1L;
+    public static HiveBinaryIntegerInspectorFactory INSTANCE = new HiveBinaryIntegerInspectorFactory();
 
-	private HiveBinaryIntegerInspectorFactory() {
+    private HiveBinaryIntegerInspectorFactory() {
 
-	}
+    }
 
-	@Override
-	public IBinaryIntegerInspector createBinaryIntegerInspector(
-			IHyracksTaskContext arg0) {
-		return new HiveBinaryIntegerInspector();
-	}
+    @Override
+    public IBinaryIntegerInspector createBinaryIntegerInspector(IHyracksTaskContext arg0) {
+        return new HiveBinaryIntegerInspector();
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
index 8f559e2..cfceb26 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
@@ -10,58 +10,59 @@
 import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
 
-public class HiveConnectorPolicyAssignmentPolicy implements
-		IConnectorPolicyAssignmentPolicy {
-	public enum Policy {
-		PIPELINING, SEND_SIDE_MAT_PIPELINING, SEND_SIDE_MAT_BLOCKING, SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING;
-	};
+public class HiveConnectorPolicyAssignmentPolicy implements IConnectorPolicyAssignmentPolicy {
+    public enum Policy {
+        PIPELINING,
+        SEND_SIDE_MAT_PIPELINING,
+        SEND_SIDE_MAT_BLOCKING,
+        SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING;
+    };
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private final IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
-	private final IConnectorPolicy sendSideMatPipeliningPolicy = new SendSideMaterializedPipeliningConnectorPolicy();
-	private final IConnectorPolicy sendSideMatBlockingPolicy = new SendSideMaterializedBlockingConnectorPolicy();
-	private final IConnectorPolicy sendSideMatReceiveSideMatBlockingPolicy = new SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy();
-	private final Policy policy;
+    private final IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+    private final IConnectorPolicy sendSideMatPipeliningPolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private final IConnectorPolicy sendSideMatBlockingPolicy = new SendSideMaterializedBlockingConnectorPolicy();
+    private final IConnectorPolicy sendSideMatReceiveSideMatBlockingPolicy = new SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy();
+    private final Policy policy;
 
-	public HiveConnectorPolicyAssignmentPolicy(Policy policy) {
-		this.policy = policy;
-	}
+    public HiveConnectorPolicyAssignmentPolicy(Policy policy) {
+        this.policy = policy;
+    }
 
-	@Override
-	public IConnectorPolicy getConnectorPolicyAssignment(
-			IConnectorDescriptor c, int nProducers, int nConsumers,
-			int[] fanouts) {
-		if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
-			// avoid deadlocks
-			switch (policy) {
-			case PIPELINING:
-			case SEND_SIDE_MAT_PIPELINING:
-				return sendSideMatPipeliningPolicy;
-			case SEND_SIDE_MAT_BLOCKING:
-				return sendSideMatBlockingPolicy;
-			case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
-				return sendSideMatReceiveSideMatBlockingPolicy;
-			default:
-				return sendSideMatPipeliningPolicy;
-			}
-		} else if (c instanceof MToNPartitioningConnectorDescriptor) {
-			// support different repartitioning policies
-			switch (policy) {
-			case PIPELINING:
-				return pipeliningPolicy;
-			case SEND_SIDE_MAT_PIPELINING:
-				return sendSideMatPipeliningPolicy;
-			case SEND_SIDE_MAT_BLOCKING:
-				return sendSideMatBlockingPolicy;
-			case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
-				return sendSideMatReceiveSideMatBlockingPolicy;
-			default:
-				return pipeliningPolicy;
-			}
-		} else {
-			// pipelining for other connectors
-			return pipeliningPolicy;
-		}
-	}
+    @Override
+    public IConnectorPolicy getConnectorPolicyAssignment(IConnectorDescriptor c, int nProducers, int nConsumers,
+            int[] fanouts) {
+        if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
+            // avoid deadlocks
+            switch (policy) {
+                case PIPELINING:
+                case SEND_SIDE_MAT_PIPELINING:
+                    return sendSideMatPipeliningPolicy;
+                case SEND_SIDE_MAT_BLOCKING:
+                    return sendSideMatBlockingPolicy;
+                case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+                    return sendSideMatReceiveSideMatBlockingPolicy;
+                default:
+                    return sendSideMatPipeliningPolicy;
+            }
+        } else if (c instanceof MToNPartitioningConnectorDescriptor) {
+            // support different repartitioning policies
+            switch (policy) {
+                case PIPELINING:
+                    return pipeliningPolicy;
+                case SEND_SIDE_MAT_PIPELINING:
+                    return sendSideMatPipeliningPolicy;
+                case SEND_SIDE_MAT_BLOCKING:
+                    return sendSideMatBlockingPolicy;
+                case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+                    return sendSideMatReceiveSideMatBlockingPolicy;
+                default:
+                    return pipeliningPolicy;
+            }
+        } else {
+            // pipelining for other connectors
+            return pipeliningPolicy;
+        }
+    }
 }
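
One policy object steers every connector in a job: M:N partition-merging connectors always materialize on the send side (the deadlock guard above), plain M:N partitioning connectors follow the configured choice, and every other connector pipelines. A hypothetical wiring sketch; the enclosing job-setup code is not part of this patch:

    IConnectorPolicyAssignmentPolicy policy = new HiveConnectorPolicyAssignmentPolicy(
            HiveConnectorPolicyAssignmentPolicy.Policy.SEND_SIDE_MAT_BLOCKING);
    // jobSpec.setConnectorPolicyAssignmentPolicy(policy); // assumed Hyracks hook
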
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
index e4fbca5..ccc2e6c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
@@ -6,27 +6,27 @@
 

 public class HiveDataSink implements IDataSink {

 

-	private Object[] schema;

+    private Object[] schema;

 

-	private Object fsOperator;

+    private Object fsOperator;

 

-	public HiveDataSink(Object sink, Object[] sourceSchema) {

-		schema = sourceSchema;

-		fsOperator = sink;

-	}

+    public HiveDataSink(Object sink, Object[] sourceSchema) {

+        schema = sourceSchema;

+        fsOperator = sink;

+    }

 

-	@Override

-	public Object getId() {

-		return fsOperator;

-	}

+    @Override

+    public Object getId() {

+        return fsOperator;

+    }

 

-	@Override

-	public Object[] getSchemaTypes() {

-		return schema;

-	}

+    @Override

+    public Object[] getSchemaTypes() {

+        return schema;

+    }

 

-	public IPartitioningProperty getPartitioningProperty() {

-		return new RandomPartitioningProperty(new HiveDomain());

-	}

+    public IPartitioningProperty getPartitioningProperty() {

+        return new RandomPartitioningProperty(new HiveDomain());

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
index edff056..67b743b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
@@ -11,38 +11,37 @@
 

 public class HiveDataSource<P> implements IDataSource<P> {

 

-	private P source;

+    private P source;

 

-	private Object[] schema;

+    private Object[] schema;

 

-	public HiveDataSource(P dataSource, Object[] sourceSchema) {

-		source = dataSource;

-		schema = sourceSchema;

-	}

+    public HiveDataSource(P dataSource, Object[] sourceSchema) {

+        source = dataSource;

+        schema = sourceSchema;

+    }

 

-	@Override

-	public P getId() {

-		return source;

-	}

+    @Override

+    public P getId() {

+        return source;

+    }

 

-	@Override

-	public Object[] getSchemaTypes() {

-		return schema;

-	}

+    @Override

+    public Object[] getSchemaTypes() {

+        return schema;

+    }

 

-	@Override

-	public void computeFDs(List<LogicalVariable> scanVariables,

-			List<FunctionalDependency> fdList) {

-	}

+    @Override

+    public void computeFDs(List<LogicalVariable> scanVariables, List<FunctionalDependency> fdList) {

+    }

 

-	@Override

-	public IDataSourcePropertiesProvider getPropertiesProvider() {

-		return new HiveDataSourcePartitioningProvider();

-	}

+    @Override

+    public IDataSourcePropertiesProvider getPropertiesProvider() {

+        return new HiveDataSourcePartitioningProvider();

+    }

 

-	@Override

-	public String toString() {

-		PartitionDesc desc = (PartitionDesc) source;

-		return desc.getTableName();

-	}

+    @Override

+    public String toString() {

+        PartitionDesc desc = (PartitionDesc) source;

+        return desc.getTableName();

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
index 08dd684..bb9c4ce 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
@@ -11,16 +11,13 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;

 import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;

 

-public class HiveDataSourcePartitioningProvider implements

-		IDataSourcePropertiesProvider {

+public class HiveDataSourcePartitioningProvider implements IDataSourcePropertiesProvider {

 

-	@Override

-	public IPhysicalPropertiesVector computePropertiesVector(

-			List<LogicalVariable> scanVariables) {

-		IPartitioningProperty property = new RandomPartitioningProperty(

-				new HiveDomain());

-		IPhysicalPropertiesVector vector = new StructuralPropertiesVector(

-				property, new LinkedList<ILocalStructuralProperty>());

-		return vector;

-	}

+    @Override

+    public IPhysicalPropertiesVector computePropertiesVector(List<LogicalVariable> scanVariables) {

+        IPartitioningProperty property = new RandomPartitioningProperty(new HiveDomain());

+        IPhysicalPropertiesVector vector = new StructuralPropertiesVector(property,

+                new LinkedList<ILocalStructuralProperty>());

+        return vector;

+    }

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
index 0af253a..8b1d3b5 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
@@ -4,14 +4,14 @@
 

 public class HiveDomain implements INodeDomain {

 

-	@Override

-	public boolean sameAs(INodeDomain domain) {

-		return true;

-	}

+    @Override

+    public boolean sameAs(INodeDomain domain) {

+        return true;

+    }

 

-	@Override

-	public Integer cardinality() {

-		return 0;

-	}

+    @Override

+    public Integer cardinality() {

+        return 0;

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
index 5782703..a6cdbe0 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
@@ -32,118 +32,98 @@
 @SuppressWarnings("rawtypes")

 public class HiveMetaDataProvider<S, T> implements IMetadataProvider<S, T> {

 

-	private Operator fileSink;

-	private Schema outputSchema;

-	private HashMap<S, IDataSource<S>> dataSourceMap;

+    private Operator fileSink;

+    private Schema outputSchema;

+    private HashMap<S, IDataSource<S>> dataSourceMap;

 

-	public HiveMetaDataProvider(Operator fsOp, Schema oi,

-			HashMap<S, IDataSource<S>> map) {

-		fileSink = fsOp;

-		outputSchema = oi;

-		dataSourceMap = map;

-	}

+    public HiveMetaDataProvider(Operator fsOp, Schema oi, HashMap<S, IDataSource<S>> map) {

+        fileSink = fsOp;

+        outputSchema = oi;

+        dataSourceMap = map;

+    }

 

-	@Override

-	public IDataSourceIndex<T, S> findDataSourceIndex(T indexId, S dataSourceId)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public IDataSourceIndex<T, S> findDataSourceIndex(T indexId, S dataSourceId) throws AlgebricksException {

+        return null;

+    }

 

-	@Override

-	public IDataSource<S> findDataSource(S id) throws AlgebricksException {

-		return dataSourceMap.get(id);

-	}

+    @Override

+    public IDataSource<S> findDataSource(S id) throws AlgebricksException {

+        return dataSourceMap.get(id);

+    }

 

-	@Override

-	public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource) {

-		return true;

-	}

+    @Override

+    public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource) {

+        return true;

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(

-			IDataSource<S> dataSource, List<LogicalVariable> scanVariables,

-			List<LogicalVariable> projectVariables, boolean projectPushed,

-			IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,

-			JobGenContext context, JobSpecification jobSpec)

-			throws AlgebricksException {

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(IDataSource<S> dataSource,

+            List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables, boolean projectPushed,

+            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec)

+            throws AlgebricksException {

 

-		S desc = dataSource.getId();

-		HiveScanRuntimeGenerator generator = new HiveScanRuntimeGenerator(

-				(PartitionDesc) desc);

-		return generator.getRuntimeOperatorAndConstraint(dataSource,

-				scanVariables, projectVariables, projectPushed, context,

-				jobSpec);

-	}

+        S desc = dataSource.getId();

+        HiveScanRuntimeGenerator generator = new HiveScanRuntimeGenerator((PartitionDesc) desc);

+        return generator.getRuntimeOperatorAndConstraint(dataSource, scanVariables, projectVariables, projectPushed,

+                context, jobSpec);

+    }

 

-	@Override

-	public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(

-			IDataSink sink, int[] printColumns,

-			IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {

+    @Override

+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,

+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {

 

-		HiveWriteRuntimeGenerator generator = new HiveWriteRuntimeGenerator(

-				(FileSinkOperator) fileSink, outputSchema);

-		return generator.getWriterRuntime(inputDesc);

-	}

+        HiveWriteRuntimeGenerator generator = new HiveWriteRuntimeGenerator((FileSinkOperator) fileSink, outputSchema);

+        return generator.getWriterRuntime(inputDesc);

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(

-			IDataSource<S> arg0, IOperatorSchema arg1,

-			List<LogicalVariable> arg2, LogicalVariable arg3,

-			RecordDescriptor arg4, JobGenContext arg5, JobSpecification arg6)

-			throws AlgebricksException {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<S> arg0,

+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, RecordDescriptor arg4,

+            JobGenContext arg5, JobSpecification arg6) throws AlgebricksException {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(

-			IDataSource<S> arg0, IOperatorSchema arg1,

-			List<LogicalVariable> arg2, LogicalVariable arg3,

-			RecordDescriptor arg4, JobGenContext arg5, JobSpecification arg6)

-			throws AlgebricksException {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<S> arg0,

+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, RecordDescriptor arg4,

+            JobGenContext arg5, JobSpecification arg6) throws AlgebricksException {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(

-			IDataSource<S> arg0, IOperatorSchema arg1,

-			List<LogicalVariable> arg2, LogicalVariable arg3,

-			JobGenContext arg4, JobSpecification arg5)

-			throws AlgebricksException {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(IDataSource<S> arg0,

+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, JobGenContext arg4,

+            JobSpecification arg5) throws AlgebricksException {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

-	@Override

-	public IFunctionInfo lookupFunction(FunctionIdentifier arg0) {

-		return new HiveFunctionInfo(arg0, null);

-	}

+    @Override

+    public IFunctionInfo lookupFunction(FunctionIdentifier arg0) {

+        return new HiveFunctionInfo(arg0, null);

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(

-			IDataSourceIndex<T, S> dataSource,

-			IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

-			IVariableTypeEnvironment typeEnv,

-			List<LogicalVariable> primaryKeys,

-			List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr,

-			RecordDescriptor recordDesc, JobGenContext context,

-			JobSpecification spec) throws AlgebricksException {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(

+            IDataSourceIndex<T, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,

+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)

+            throws AlgebricksException {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

-	@Override

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(

-			IDataSourceIndex<T, S> dataSource,

-			IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

-			IVariableTypeEnvironment typeEnv,

-			List<LogicalVariable> primaryKeys,

-			List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr,

-			RecordDescriptor recordDesc, JobGenContext context,

-			JobSpecification spec) throws AlgebricksException {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(

+            IDataSourceIndex<T, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,

+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)

+            throws AlgebricksException {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

 }
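
Of the IMetadataProvider surface, only the scan and file-write paths are live here; the insert, delete, index-maintenance, and write-result runtimes are stubs returning null, so a plan that reaches them gets no runtime.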

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
index 83382f0..cdb0e95 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
@@ -11,74 +11,74 @@
 

 public class HiveOperatorSchema implements IOperatorSchema {

 

-	private final Map<LogicalVariable, Integer> varMap;

+    private final Map<LogicalVariable, Integer> varMap;

 

-	private final List<LogicalVariable> varList;

+    private final List<LogicalVariable> varList;

 

-	public HiveOperatorSchema() {

-		varMap = new HashMap<LogicalVariable, Integer>();

-		varList = new ArrayList<LogicalVariable>();

-	}

+    public HiveOperatorSchema() {

+        varMap = new HashMap<LogicalVariable, Integer>();

+        varList = new ArrayList<LogicalVariable>();

+    }

 

-	@Override

-	public void addAllVariables(IOperatorSchema source) {

-		for (LogicalVariable v : source) {

-			varMap.put(v, varList.size());

-			varList.add(v);

-		}

-	}

+    @Override

+    public void addAllVariables(IOperatorSchema source) {

+        for (LogicalVariable v : source) {

+            varMap.put(v, varList.size());

+            varList.add(v);

+        }

+    }

 

-	@Override

-	public void addAllNewVariables(IOperatorSchema source) {

-		for (LogicalVariable v : source) {

-			if (varMap.get(v) == null) {

-				varMap.put(v, varList.size());

-				varList.add(v);

-			}

-		}

-	}

+    @Override

+    public void addAllNewVariables(IOperatorSchema source) {

+        for (LogicalVariable v : source) {

+            if (varMap.get(v) == null) {

+                varMap.put(v, varList.size());

+                varList.add(v);

+            }

+        }

+    }

 

-	@Override

-	public int addVariable(LogicalVariable var) {

-		int idx = varList.size();

-		varMap.put(var, idx);

-		varList.add(var);

-		return idx;

-	}

+    @Override

+    public int addVariable(LogicalVariable var) {

+        int idx = varList.size();

+        varMap.put(var, idx);

+        varList.add(var);

+        return idx;

+    }

 

-	@Override

-	public void clear() {

-		varMap.clear();

-		varList.clear();

-	}

+    @Override

+    public void clear() {

+        varMap.clear();

+        varList.clear();

+    }

 

-	@Override

-	public int findVariable(LogicalVariable var) {

-		Integer i = varMap.get(var);

-		if (i == null) {

-			return -1;

-		}

-		return i;

-	}

+    @Override

+    public int findVariable(LogicalVariable var) {

+        Integer i = varMap.get(var);

+        if (i == null) {

+            return -1;

+        }

+        return i;

+    }

 

-	@Override

-	public int getSize() {

-		return varList.size();

-	}

+    @Override

+    public int getSize() {

+        return varList.size();

+    }

 

-	@Override

-	public LogicalVariable getVariable(int index) {

-		return varList.get(index);

-	}

+    @Override

+    public LogicalVariable getVariable(int index) {

+        return varList.get(index);

+    }

 

-	@Override

-	public Iterator<LogicalVariable> iterator() {

-		return varList.iterator();

-	}

+    @Override

+    public Iterator<LogicalVariable> iterator() {

+        return varList.iterator();

+    }

 

-	@Override

-	public String toString() {

-		return varMap.toString();

-	}

+    @Override

+    public String toString() {

+        return varMap.toString();

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
index 9c8aee4..7bce2c2 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
@@ -29,89 +29,78 @@
 @SuppressWarnings({ "rawtypes", "deprecation" })

 public class HiveScanRuntimeGenerator {

 

-	private PartitionDesc fileDesc;

+    private PartitionDesc fileDesc;

 

-	private transient Path filePath;

+    private transient Path filePath;

 

-	private String filePathName;

+    private String filePathName;

 

-	private Properties properties;

+    private Properties properties;

 

-	public HiveScanRuntimeGenerator(PartitionDesc path) {

-		fileDesc = path;

-		properties = fileDesc.getProperties();

+    public HiveScanRuntimeGenerator(PartitionDesc path) {

+        fileDesc = path;

+        properties = fileDesc.getProperties();

 

-		String inputPath = (String) properties.getProperty("location");

+        String inputPath = (String) properties.getProperty("location");

 

-		if (inputPath.startsWith("file:")) {

-			// Windows

-			String[] strs = inputPath.split(":");

-			filePathName = strs[strs.length - 1];

-		} else {

-			// Linux

-			filePathName = inputPath;

-		}

+        if (inputPath.startsWith("file:")) {

+            // Windows

+            String[] strs = inputPath.split(":");

+            filePathName = strs[strs.length - 1];

+        } else {

+            // Linux

+            filePathName = inputPath;

+        }

 

-		filePath = new Path(filePathName);

-	}

+        filePath = new Path(filePathName);

+    }

 

-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRuntimeOperatorAndConstraint(

-			IDataSource dataSource, List<LogicalVariable> scanVariables,

-			List<LogicalVariable> projectVariables, boolean projectPushed,

-			JobGenContext context, JobSpecification jobSpec)

-			throws AlgebricksException {

-		// get the correct delimiter from Hive metastore or other data

-		// structures

-		IOperatorSchema propagatedSchema = new HiveOperatorSchema();

+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRuntimeOperatorAndConstraint(

+            IDataSource dataSource, List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables,

+            boolean projectPushed, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {

+        // get the correct delimiter from Hive metastore or other data

+        // structures

+        IOperatorSchema propagatedSchema = new HiveOperatorSchema();

 

-		List<LogicalVariable> outputVariables = projectPushed ? projectVariables

-				: scanVariables;

-		for (LogicalVariable var : outputVariables)

-			propagatedSchema.addVariable(var);

+        List<LogicalVariable> outputVariables = projectPushed ? projectVariables : scanVariables;

+        for (LogicalVariable var : outputVariables)

+            propagatedSchema.addVariable(var);

 

-		int[] outputColumnsOffset = new int[scanVariables.size()];

-		int i = 0;

-		for (LogicalVariable var : scanVariables)

-			if (outputVariables.contains(var)) {

-				int offset = outputVariables.indexOf(var);

-				outputColumnsOffset[i++] = offset;

-			} else

-				outputColumnsOffset[i++] = -1;

+        int[] outputColumnsOffset = new int[scanVariables.size()];

+        int i = 0;

+        for (LogicalVariable var : scanVariables)

+            if (outputVariables.contains(var)) {

+                int offset = outputVariables.indexOf(var);

+                outputColumnsOffset[i++] = offset;

+            } else

+                outputColumnsOffset[i++] = -1;

 

-		Object[] schemaTypes = dataSource.getSchemaTypes();

-		// get record descriptor

-		RecordDescriptor recDescriptor = mkRecordDescriptor(propagatedSchema,

-				schemaTypes, context);

+        Object[] schemaTypes = dataSource.getSchemaTypes();

+        // get record descriptor

+        RecordDescriptor recDescriptor = mkRecordDescriptor(propagatedSchema, schemaTypes, context);

 

-		// setup the run time operator

-		JobConf conf = ConfUtil.getJobConf(fileDesc.getInputFileFormatClass(),

-				filePath);

-		int clusterSize = ConfUtil.getNCs().length;

-		IFileSplitProvider fsprovider = new HiveFileSplitProvider(conf,

-				filePathName, clusterSize);

-		ITupleParserFactory tupleParserFactory = new HiveTupleParserFactory(

-				fileDesc, conf, outputColumnsOffset);

-		HiveFileScanOperatorDescriptor opDesc = new HiveFileScanOperatorDescriptor(

-				jobSpec, fsprovider, tupleParserFactory, recDescriptor);

+        // setup the run time operator

+        JobConf conf = ConfUtil.getJobConf(fileDesc.getInputFileFormatClass(), filePath);

+        int clusterSize = ConfUtil.getNCs().length;

+        IFileSplitProvider fsprovider = new HiveFileSplitProvider(conf, filePathName, clusterSize);

+        ITupleParserFactory tupleParserFactory = new HiveTupleParserFactory(fileDesc, conf, outputColumnsOffset);

+        HiveFileScanOperatorDescriptor opDesc = new HiveFileScanOperatorDescriptor(jobSpec, fsprovider,

+                tupleParserFactory, recDescriptor);

 

-		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(

-				opDesc, opDesc.getPartitionConstraint());

-	}

+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(opDesc, opDesc.getPartitionConstraint());

+    }

 

-	private static RecordDescriptor mkRecordDescriptor(

-			IOperatorSchema opSchema, Object[] types, JobGenContext context)

-			throws AlgebricksException {

-		ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema

-				.getSize()];

-		ISerializerDeserializerProvider sdp = context

-				.getSerializerDeserializerProvider();

-		int size = opSchema.getSize();

-		for (int i = 0; i < size; i++) {

-			Object t = types[i];

-			fields[i] = sdp.getSerializerDeserializer(t);

-			i++;

-		}

-		return new RecordDescriptor(fields);

-	}

+    private static RecordDescriptor mkRecordDescriptor(IOperatorSchema opSchema, Object[] types, JobGenContext context)

+            throws AlgebricksException {

+        ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];

+        ISerializerDeserializerProvider sdp = context.getSerializerDeserializerProvider();

+        int size = opSchema.getSize();

+        for (int i = 0; i < size; i++) {

+            Object t = types[i];

+            // one serializer/deserializer per field; i advances only in the loop header

+            fields[i] = sdp.getSerializerDeserializer(t);

+        }

+        return new RecordDescriptor(fields);

+    }

 

 }
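
For reference, the projection mapping built in getRuntimeOperatorAndConstraint above can be exercised in isolation; this is a minimal, self-contained sketch in which plain integers stand in for LogicalVariable ids (all values hypothetical):

    import java.util.Arrays;
    import java.util.List;

    public class OffsetMappingSketch {
        public static void main(String[] args) {
            // hypothetical variable ids: four scanned columns, two projected out
            List<Integer> scanVariables = Arrays.asList(10, 11, 12, 13);
            List<Integer> outputVariables = Arrays.asList(12, 10);

            int[] outputColumnsOffset = new int[scanVariables.size()];
            int i = 0;
            for (Integer var : scanVariables) {
                // indexOf returns -1 for columns that are projected away
                outputColumnsOffset[i++] = outputVariables.indexOf(var);
            }

            // prints [1, -1, 0, -1]: column 0 feeds output slot 1, column 2 feeds slot 0
            System.out.println(Arrays.toString(outputColumnsOffset));
        }
    }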

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
index d372868..90a7e64 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
@@ -13,28 +13,26 @@
 

 @SuppressWarnings("deprecation")

 public class HiveWriteRuntimeGenerator {

-	private FileSinkOperator fileSink;

+    private FileSinkOperator fileSink;

 

-	private Schema inputSchema;

+    private Schema inputSchema;

 

-	public HiveWriteRuntimeGenerator(FileSinkOperator fsOp, Schema oi) {

-		fileSink = fsOp;

-		inputSchema = oi;

-	}

+    public HiveWriteRuntimeGenerator(FileSinkOperator fsOp, Schema oi) {

+        fileSink = fsOp;

+        inputSchema = oi;

+    }

 

-	/**

-	 * get the write runtime

-	 * 

-	 * @param inputDesc

-	 * @return

-	 */

-	public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriterRuntime(

-			RecordDescriptor inputDesc) {

-		JobConf conf = ConfUtil.getJobConf();

-		IPushRuntimeFactory factory = new HivePushRuntimeFactory(inputDesc,

-				conf, fileSink, inputSchema);

-		Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> pair = new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(

-				factory, null);

-		return pair;

-	}

+    /**

+     * get the write runtime

+     * 

+     * @param inputDesc

+     * @return the writer runtime factory paired with its (null) partition constraint

+     */

+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriterRuntime(RecordDescriptor inputDesc) {

+        JobConf conf = ConfUtil.getJobConf();

+        IPushRuntimeFactory factory = new HivePushRuntimeFactory(inputDesc, conf, fileSink, inputSchema);

+        Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> pair = new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(

+                factory, null);

+        return pair;

+    }

 }
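
Callers consume this in two steps; `fileSinkOp`, `schema`, and `inputDesc` below are hypothetical stand-ins for values produced during plan translation:

    HiveWriteRuntimeGenerator gen = new HiveWriteRuntimeGenerator(fileSinkOp, schema);
    Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> writer = gen.getWriterRuntime(inputDesc);
    // writer.second is deliberately null: the sink adopts the partitioning of its input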

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java
index 2f988f8..03f3312 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java
@@ -3,17 +3,16 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;

 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;

 

-public abstract class AbstractHiveFileSplitProvider implements

-		IFileSplitProvider {

-	private static final long serialVersionUID = 1L;

+public abstract class AbstractHiveFileSplitProvider implements IFileSplitProvider {

+    private static final long serialVersionUID = 1L;

 

-	@Override

-	public FileSplit[] getFileSplits() {

-		// TODO Auto-generated method stub

-		return null;

-	}

+    @Override

+    public FileSplit[] getFileSplits() {

+        // TODO Auto-generated method stub

+        return null;

+    }

 

-	@SuppressWarnings("deprecation")

-	public abstract org.apache.hadoop.mapred.FileSplit[] getFileSplitArray();

+    @SuppressWarnings("deprecation")

+    public abstract org.apache.hadoop.mapred.FileSplit[] getFileSplitArray();

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java
index a8addeb..485e1d0 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java
@@ -11,19 +11,17 @@
 @SuppressWarnings("deprecation")

 public abstract class AbstractHiveTupleParser implements ITupleParser {

 

-	@Override

-	public void parse(InputStream in, IFrameWriter writer)

-			throws HyracksDataException {

-		// empty implementation

-	}

+    @Override

+    public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {

+        // empty implementation

+    }

 

-	/**

-	 * method for parsing HDFS file split

-	 * 

-	 * @param split

-	 * @param writer

-	 */

-	abstract public void parse(FileSplit split, IFrameWriter writer)

-			throws HyracksDataException;

+    /**

+     * method for parsing HDFS file split

+     * 

+     * @param split

+     * @param writer

+     */

+    abstract public void parse(FileSplit split, IFrameWriter writer) throws HyracksDataException;

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java
index 9e62c73..cdf5795 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java
@@ -40,134 +40,126 @@
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
 
 @SuppressWarnings("deprecation")
-public class HiveFileScanOperatorDescriptor extends
-		AbstractSingleActivityOperatorDescriptor {
-	private static final long serialVersionUID = 1L;
+public class HiveFileScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
 
-	/**
-	 * tuple parser factory
-	 */
-	private final ITupleParserFactory tupleParserFactory;
+    /**
+     * tuple parser factory
+     */
+    private final ITupleParserFactory tupleParserFactory;
 
-	/**
-	 * Hive file split
-	 */
-	private Partition[] parts;
+    /**
+     * Hive file split
+     */
+    private Partition[] parts;
 
-	/**
-	 * IFileSplitProvider
-	 */
-	private IFileSplitProvider fileSplitProvider;
+    /**
+     * IFileSplitProvider
+     */
+    private IFileSplitProvider fileSplitProvider;
 
-	/**
-	 * constrains in the form of host DNS names
-	 */
-	private String[] constraintsByHostNames;
+    /**
+     * constraints in the form of host DNS names
+     */
+    private String[] constraintsByHostNames;
 
-	/**
-	 * ip-to-node controller mapping
-	 */
-	private Map<String, List<String>> ncMapping;
+    /**
+     * ip-to-node controller mapping
+     */
+    private Map<String, List<String>> ncMapping;
 
-	/**
-	 * an array of NCs
-	 */
-	private String[] NCs;
+    /**
+     * an array of NCs
+     */
+    private String[] NCs;
 
-	/**
-	 * 
-	 * @param spec
-	 * @param fsProvider
-	 */
-	public HiveFileScanOperatorDescriptor(JobSpecification spec,
-			IFileSplitProvider fsProvider,
-			ITupleParserFactory tupleParserFactory, RecordDescriptor rDesc) {
-		super(spec, 0, 1);
-		this.tupleParserFactory = tupleParserFactory;
-		recordDescriptors[0] = rDesc;
-		fileSplitProvider = fsProvider;
-	}
+    /**
+     * @param spec
+     * @param fsProvider
+     */
+    public HiveFileScanOperatorDescriptor(JobSpecification spec, IFileSplitProvider fsProvider,
+            ITupleParserFactory tupleParserFactory, RecordDescriptor rDesc) {
+        super(spec, 0, 1);
+        this.tupleParserFactory = tupleParserFactory;
+        recordDescriptors[0] = rDesc;
+        fileSplitProvider = fsProvider;
+    }
 
-	/**
-	 * set partition constraint at the first time it is called the number of
-	 * partitions is obtained from HDFS name node
-	 */
-	public AlgebricksAbsolutePartitionConstraint getPartitionConstraint()
-			throws AlgebricksException {
-		FileSplit[] returnedSplits = ((AbstractHiveFileSplitProvider) fileSplitProvider)
-				.getFileSplitArray();
-		Random random = new Random(System.currentTimeMillis());
-		ncMapping = ConfUtil.getNCMapping();
-		NCs = ConfUtil.getNCs();
+    /**
+     * set the partition constraint; the first time this is called, the number of
+     * partitions is obtained from the HDFS name node
+     */
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
+        FileSplit[] returnedSplits = ((AbstractHiveFileSplitProvider) fileSplitProvider).getFileSplitArray();
+        Random random = new Random(System.currentTimeMillis());
+        ncMapping = ConfUtil.getNCMapping();
+        NCs = ConfUtil.getNCs();
 
-		int size = 0;
-		for (FileSplit split : returnedSplits)
-			if (split != null)
-				size++;
+        int size = 0;
+        for (FileSplit split : returnedSplits)
+            if (split != null)
+                size++;
 
-		FileSplit[] splits = new FileSplit[size];
-		for (int i = 0; i < returnedSplits.length; i++)
-			if (returnedSplits[i] != null)
-				splits[i] = returnedSplits[i];
+        FileSplit[] splits = new FileSplit[size];
+        int nonNull = 0;
+        for (int i = 0; i < returnedSplits.length; i++)
+            if (returnedSplits[i] != null)
+                splits[nonNull++] = returnedSplits[i];
 
-		System.out.println("number of splits: " + splits.length);
-		constraintsByHostNames = new String[splits.length];
-		for (int i = 0; i < splits.length; i++) {
-			try {
-				String[] loc = splits[i].getLocations();
-				Collections.shuffle(Arrays.asList(loc), random);
-				if (loc.length > 0) {
-					InetAddress[] allIps = InetAddress.getAllByName(loc[0]);
-					for (InetAddress ip : allIps) {
-						if (ncMapping.get(ip.getHostAddress()) != null) {
-							List<String> ncs = ncMapping.get(ip
-									.getHostAddress());
-							int pos = random.nextInt(ncs.size());
-							constraintsByHostNames[i] = ncs.get(pos);
-						} else {
-							int pos = random.nextInt(NCs.length);
-							constraintsByHostNames[i] = NCs[pos];
-						}
-					}
-				} else {
-					int pos = random.nextInt(NCs.length);
-					constraintsByHostNames[i] = NCs[pos];
-					if (splits[i].getLength() > 0)
-						throw new IllegalStateException(
-								"non local scanner non locations!!");
-				}
-			} catch (IOException e) {
-				throw new AlgebricksException(e);
-			}
-		}
+        System.out.println("number of splits: " + splits.length);
+        constraintsByHostNames = new String[splits.length];
+        for (int i = 0; i < splits.length; i++) {
+            try {
+                String[] loc = splits[i].getLocations();
+                Collections.shuffle(Arrays.asList(loc), random);
+                if (loc.length > 0) {
+                    InetAddress[] allIps = InetAddress.getAllByName(loc[0]);
+                    for (InetAddress ip : allIps) {
+                        if (ncMapping.get(ip.getHostAddress()) != null) {
+                            List<String> ncs = ncMapping.get(ip.getHostAddress());
+                            int pos = random.nextInt(ncs.size());
+                            constraintsByHostNames[i] = ncs.get(pos);
+                        } else {
+                            int pos = random.nextInt(NCs.length);
+                            constraintsByHostNames[i] = NCs[pos];
+                        }
+                    }
+                } else {
+                    int pos = random.nextInt(NCs.length);
+                    constraintsByHostNames[i] = NCs[pos];
+                    if (splits[i].getLength() > 0)
+                        throw new IllegalStateException("non local scanner non locations!!");
+                }
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
+        }
 
-		parts = new Partition[splits.length];
-		for (int i = 0; i < splits.length; i++) {
-			parts[i] = new Partition(splits[i]);
-		}
-		return new AlgebricksAbsolutePartitionConstraint(constraintsByHostNames);
-	}
+        parts = new Partition[splits.length];
+        for (int i = 0; i < splits.length; i++) {
+            parts[i] = new Partition(splits[i]);
+        }
+        return new AlgebricksAbsolutePartitionConstraint(constraintsByHostNames);
+    }
 
-	@Override
-	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-			IRecordDescriptorProvider recordDescProvider, int partition,
-			int nPartitions) {
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
 
-		final ITupleParser tp = tupleParserFactory.createTupleParser(ctx);
-		final int partitionId = partition;
+        final ITupleParser tp = tupleParserFactory.createTupleParser(ctx);
+        final int partitionId = partition;
 
-		return new AbstractUnaryOutputSourceOperatorNodePushable() {
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
 
-			@Override
-			public void initialize() throws HyracksDataException {
-				writer.open();
-				FileSplit split = parts[partitionId].toFileSplit();
-				if (split == null)
-					throw new HyracksDataException("partition " + partitionId
-							+ " is null!");
-				((AbstractHiveTupleParser) tp).parse(split, writer);
-				writer.close();
-			}
-		};
-	}
+            @Override
+            public void initialize() throws HyracksDataException {
+                writer.open();
+                FileSplit split = parts[partitionId].toFileSplit();
+                if (split == null)
+                    throw new HyracksDataException("partition " + partitionId + " is null!");
+                ((AbstractHiveTupleParser) tp).parse(split, writer);
+                writer.close();
+            }
+        };
+    }
 }
\ No newline at end of file
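
The locality heuristic in getPartitionConstraint — resolve each replica location to node controllers and pick one at random, else fall back to any NC — distilled into a self-contained sketch (DNS resolution omitted; all addresses and NC names hypothetical):

    import java.util.*;

    public class LocalityAssignmentSketch {
        // prefer an NC colocated with a replica of the split; otherwise pick any NC
        static String assign(String[] splitLocations, Map<String, List<String>> ipToNcs,
                String[] allNcs, Random random) {
            for (String ip : splitLocations) {
                List<String> ncs = ipToNcs.get(ip);
                if (ncs != null)
                    return ncs.get(random.nextInt(ncs.size()));
            }
            return allNcs[random.nextInt(allNcs.length)];
        }

        public static void main(String[] args) {
            Map<String, List<String>> ipToNcs = new HashMap<String, List<String>>();
            ipToNcs.put("10.0.0.1", Arrays.asList("nc1", "nc2"));
            String[] allNcs = { "nc1", "nc2", "nc3" };
            Random random = new Random(42);

            // colocated case: one of nc1/nc2; no-replica case: any of the three
            System.out.println(assign(new String[] { "10.0.0.1" }, ipToNcs, allNcs, random));
            System.out.println(assign(new String[] { "10.0.0.9" }, ipToNcs, allNcs, random));
        }
    }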
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java
index d92d353..af52f27 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java
@@ -17,99 +17,92 @@
 

 @SuppressWarnings({ "deprecation", "rawtypes" })

 public class HiveFileSplitProvider extends AbstractHiveFileSplitProvider {

-	private static final long serialVersionUID = 1L;

+    private static final long serialVersionUID = 1L;

 

-	private transient InputFormat format;

-	private transient JobConf conf;

-	private String confContent;

-	final private int nPartition;

-	private transient FileSplit[] splits;

+    private transient InputFormat format;

+    private transient JobConf conf;

+    private String confContent;

+    private final int nPartition;

+    private transient FileSplit[] splits;

 

-	public HiveFileSplitProvider(JobConf conf, String filePath, int nPartition) {

-		format = conf.getInputFormat();

-		this.conf = conf;

-		this.nPartition = nPartition;

-		writeConfContent();

-	}

+    public HiveFileSplitProvider(JobConf conf, String filePath, int nPartition) {

+        format = conf.getInputFormat();

+        this.conf = conf;

+        this.nPartition = nPartition;

+        writeConfContent();

+    }

 

-	private void writeConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void writeConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			DataOutputStream out = new DataOutputStream(new FileOutputStream(

-					new File(fileName)));

-			conf.writeXml(out);

-			out.close();

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            DataOutputStream out = new DataOutputStream(new FileOutputStream(new File(fileName)));

+            conf.writeXml(out);

+            out.close();

 

-			DataInputStream in = new DataInputStream(new FileInputStream(

-					fileName));

-			StringBuffer buffer = new StringBuffer();

-			String line;

-			while ((line = in.readLine()) != null) {

-				buffer.append(line + "\n");

-			}

-			in.close();

-			confContent = buffer.toString();

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+            DataInputStream in = new DataInputStream(new FileInputStream(fileName));

+            StringBuffer buffer = new StringBuffer();

+            String line;

+            while ((line = in.readLine()) != null) {

+                buffer.append(line + "\n");

+            }

+            in.close();

+            confContent = buffer.toString();

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

-	private void readConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void readConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			PrintWriter out = new PrintWriter((new OutputStreamWriter(

-					new FileOutputStream(new File(fileName)))));

-			out.write(confContent);

-			out.close();

-			conf = new JobConf(fileName);

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            PrintWriter out = new PrintWriter((new OutputStreamWriter(new FileOutputStream(new File(fileName)))));

+            out.write(confContent);

+            out.close();

+            conf = new JobConf(fileName);

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

-	@Override

-	/**

-	 * get the HDFS file split

-	 */

-	public FileSplit[] getFileSplitArray() {

-		readConfContent();

-		conf.setClassLoader(this.getClass().getClassLoader());

-		format = conf.getInputFormat();

-		// int splitSize = conf.getInt("mapred.min.split.size", 0);

+    /**

+     * get the HDFS file split

+     */

+    @Override

+    public FileSplit[] getFileSplitArray() {

+        readConfContent();

+        conf.setClassLoader(this.getClass().getClassLoader());

+        format = conf.getInputFormat();

+        // int splitSize = conf.getInt("mapred.min.split.size", 0);

 

-		if (splits == null) {

-			try {

-				splits = (org.apache.hadoop.mapred.FileSplit[]) format

-						.getSplits(conf, nPartition);

-				System.out.println("hdfs split number: " + splits.length);

-			} catch (IOException e) {

-				String inputPath = conf.get("mapred.input.dir");

-				String hdfsURL = conf.get("fs.default.name");

-				String alternatePath = inputPath.replaceAll(hdfsURL, "file:");

-				conf.set("mapred.input.dir", alternatePath);

-				try {

-					splits = (org.apache.hadoop.mapred.FileSplit[]) format

-							.getSplits(conf, nPartition);

-					System.out.println("hdfs split number: " + splits.length);

-				} catch (IOException e1) {

-					e1.printStackTrace();

-					Log.debug(e1.getMessage());

-					return null;

-				}

-			}

-		}

-		return splits;

-	}

+        if (splits == null) {

+            try {

+                splits = (org.apache.hadoop.mapred.FileSplit[]) format.getSplits(conf, nPartition);

+                System.out.println("hdfs split number: " + splits.length);

+            } catch (IOException e) {

+                String inputPath = conf.get("mapred.input.dir");

+                String hdfsURL = conf.get("fs.default.name");

+                String alternatePath = inputPath.replaceAll(hdfsURL, "file:");

+                conf.set("mapred.input.dir", alternatePath);

+                try {

+                    splits = (org.apache.hadoop.mapred.FileSplit[]) format.getSplits(conf, nPartition);

+                    System.out.println("hdfs split number: " + splits.length);

+                } catch (IOException e1) {

+                    e1.printStackTrace();

+                    Log.debug(e1.getMessage());

+                    return null;

+                }

+            }

+        }

+        return splits;

+    }

 }
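
The writeConfContent/readConfContent pair exists because JobConf is not Java-serializable: it is flattened to its XML form on the client and re-parsed on the node controller. A condensed sketch of the same round-trip, using an in-memory buffer and a temp file instead of the hadoop-conf-tmp directory:

    import java.io.*;
    import org.apache.hadoop.mapred.JobConf;

    public class ConfRoundTripSketch {
        // client side: flatten the JobConf to an XML string
        static String toXml(JobConf conf) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            conf.writeXml(bos);
            return bos.toString("UTF-8");
        }

        // node-controller side: rebuild a JobConf from the XML string
        static JobConf fromXml(String xml) throws IOException {
            File tmp = File.createTempFile("hadoop-conf", ".xml");
            Writer out = new OutputStreamWriter(new FileOutputStream(tmp), "UTF-8");
            out.write(xml);
            out.close();
            return new JobConf(tmp.getAbsolutePath());
        }
    }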

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java
index 7681bd1..718c311 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java
@@ -36,198 +36,180 @@
 @SuppressWarnings({ "rawtypes", "deprecation", "unchecked" })

 public class HiveTupleParser extends AbstractHiveTupleParser {

 

-	private int[] outputColumnsOffset;

-	/**

-	 * class of input format

-	 */

-	private InputFormat inputFormat;

+    private int[] outputColumnsOffset;

+    /**

+     * class of input format

+     */

+    private InputFormat inputFormat;

 

-	/**

-	 * serialization/deserialization object

-	 */

-	private SerDe serDe;

+    /**

+     * serialization/deserialization object

+     */

+    private SerDe serDe;

 

-	/**

-	 * the input row object inspector

-	 */

-	private ObjectInspector objectInspector;

+    /**

+     * the input row object inspector

+     */

+    private ObjectInspector objectInspector;

 

-	/**

-	 * the hadoop job conf

-	 */

-	private JobConf job;

+    /**

+     * the hadoop job conf

+     */

+    private JobConf job;

 

-	/**

-	 * Hyrax context to control resource allocation

-	 */

-	private final IHyracksTaskContext ctx;

+    /**

+     * Hyracks context to control resource allocation

+     */

+    private final IHyracksTaskContext ctx;

 

-	/**

-	 * lazy serde: format flow in between operators

-	 */

-	private final SerDe outputSerDe;

+    /**

+     * lazy serde: the format of the data flow between operators

+     */

+    private final SerDe outputSerDe;

 

-	/**

-	 * the parser from hive data to binary data

-	 */

-	private IHiveParser parser = null;

+    /**

+     * the parser from hive data to binary data

+     */

+    private IHiveParser parser = null;

 

-	/**

-	 * parser for any hive input format

-	 * 

-	 * @param inputFormatClass

-	 * @param serDeClass

-	 * @param tbl

-	 * @param conf

-	 * @throws AlgebricksException

-	 */

-	public HiveTupleParser(String inputFormatClass, String serDeClass,

-			String outputSerDeClass, Properties tbl, JobConf conf,

-			final IHyracksTaskContext ctx, int[] outputColumnsOffset)

-			throws AlgebricksException {

-		try {

-			conf.setClassLoader(this.getClass().getClassLoader());

+    /**

+     * parser for any hive input format

+     * 

+     * @param inputFormatClass

+     * @param serDeClass

+     * @param tbl

+     * @param conf

+     * @throws AlgebricksException

+     */

+    public HiveTupleParser(String inputFormatClass, String serDeClass, String outputSerDeClass, Properties tbl,

+            JobConf conf, final IHyracksTaskContext ctx, int[] outputColumnsOffset) throws AlgebricksException {

+        try {

+            conf.setClassLoader(this.getClass().getClassLoader());

 

-			inputFormat = (InputFormat) ReflectionUtils.newInstance(

-					Class.forName(inputFormatClass), conf);

-			job = conf;

+            inputFormat = (InputFormat) ReflectionUtils.newInstance(Class.forName(inputFormatClass), conf);

+            job = conf;

 

-			// initialize the input serde

-			serDe = (SerDe) ReflectionUtils.newInstance(

-					Class.forName(serDeClass), job);

-			serDe.initialize(job, tbl);

+            // initialize the input serde

+            serDe = (SerDe) ReflectionUtils.newInstance(Class.forName(serDeClass), job);

+            serDe.initialize(job, tbl);

 

-			// initialize the output serde

-			outputSerDe = (SerDe) ReflectionUtils.newInstance(

-					Class.forName(outputSerDeClass), job);

-			outputSerDe.initialize(job, tbl);

+            // initialize the output serde

+            outputSerDe = (SerDe) ReflectionUtils.newInstance(Class.forName(outputSerDeClass), job);

+            outputSerDe.initialize(job, tbl);

 

-			// object inspector of the row

-			objectInspector = serDe.getObjectInspector();

+            // object inspector of the row

+            objectInspector = serDe.getObjectInspector();

 

-			// hyracks context

-			this.ctx = ctx;

-			this.outputColumnsOffset = outputColumnsOffset;

+            // hyracks context

+            this.ctx = ctx;

+            this.outputColumnsOffset = outputColumnsOffset;

 

-			if (objectInspector instanceof LazySimpleStructObjectInspector) {

-				LazySimpleStructObjectInspector rowInspector = (LazySimpleStructObjectInspector) objectInspector;

-				List<? extends StructField> fieldRefs = rowInspector

-						.getAllStructFieldRefs();

-				boolean lightWeightParsable = true;

-				for (StructField fieldRef : fieldRefs) {

-					Category category = fieldRef.getFieldObjectInspector()

-							.getCategory();

-					if (!(category == Category.PRIMITIVE)) {

-						lightWeightParsable = false;

-						break;

-					}

-				}

-				if (lightWeightParsable)

-					parser = new TextToBinaryTupleParser(

-							this.outputColumnsOffset, this.objectInspector);

-			}

-		} catch (Exception e) {

-			throw new AlgebricksException(e);

-		}

-	}

+            if (objectInspector instanceof LazySimpleStructObjectInspector) {

+                LazySimpleStructObjectInspector rowInspector = (LazySimpleStructObjectInspector) objectInspector;

+                List<? extends StructField> fieldRefs = rowInspector.getAllStructFieldRefs();

+                boolean lightWeightParsable = true;

+                for (StructField fieldRef : fieldRefs) {

+                    Category category = fieldRef.getFieldObjectInspector().getCategory();

+                    if (category != Category.PRIMITIVE) {

+                        lightWeightParsable = false;

+                        break;

+                    }

+                }

+                if (lightWeightParsable)

+                    parser = new TextToBinaryTupleParser(this.outputColumnsOffset, this.objectInspector);

+            }

+        } catch (Exception e) {

+            throw new AlgebricksException(e);

+        }

+    }

 

-	/**

-	 * parse a input HDFS file split, the result is send to the writer

-	 * one-frame-a-time

-	 * 

-	 * @param split

-	 *            the HDFS file split

-	 * @param writer

-	 *            the writer

-	 * @throws HyracksDataException

-	 *             if there is sth. wrong in the ser/de

-	 */

-	@Override

-	public void parse(FileSplit split, IFrameWriter writer)

-			throws HyracksDataException {

-		try {

-			StructObjectInspector structInspector = (StructObjectInspector) objectInspector;

+    /**

+     * parse an input HDFS file split; the result is sent to the writer

+     * one frame at a time

+     * 

+     * @param split

+     *            the HDFS file split

+     * @param writer

+     *            the writer

+     * @throws HyracksDataException

+     *             if something goes wrong in the ser/de

+     */

+    @Override

+    public void parse(FileSplit split, IFrameWriter writer) throws HyracksDataException {

+        try {

+            StructObjectInspector structInspector = (StructObjectInspector) objectInspector;

 

-			// create the reader, key, and value

-			RecordReader reader = inputFormat.getRecordReader(split, job,

-					Reporter.NULL);

-			Object key = reader.createKey();

-			Object value = reader.createValue();

+            // create the reader, key, and value

+            RecordReader reader = inputFormat.getRecordReader(split, job, Reporter.NULL);

+            Object key = reader.createKey();

+            Object value = reader.createValue();

 

-			// allocate a new frame

-			ByteBuffer frame = ctx.allocateFrame();

-			FrameTupleAppender appender = new FrameTupleAppender(

-					ctx.getFrameSize());

-			appender.reset(frame, true);

+            // allocate a new frame

+            ByteBuffer frame = ctx.allocateFrame();

+            FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());

+            appender.reset(frame, true);

 

-			List<? extends StructField> fieldRefs = structInspector

-					.getAllStructFieldRefs();

-			int size = 0;

-			for (int i = 0; i < outputColumnsOffset.length; i++)

-				if (outputColumnsOffset[i] >= 0)

-					size++;

+            List<? extends StructField> fieldRefs = structInspector.getAllStructFieldRefs();

+            int size = 0;

+            for (int i = 0; i < outputColumnsOffset.length; i++)

+                if (outputColumnsOffset[i] >= 0)

+                    size++;

 

-			ArrayTupleBuilder tb = new ArrayTupleBuilder(size);

-			DataOutput dos = tb.getDataOutput();

-			StructField[] outputFieldRefs = new StructField[size];

-			Object[] outputFields = new Object[size];

-			for (int i = 0; i < outputColumnsOffset.length; i++)

-				if (outputColumnsOffset[i] >= 0)

-					outputFieldRefs[outputColumnsOffset[i]] = fieldRefs.get(i);

+            ArrayTupleBuilder tb = new ArrayTupleBuilder(size);

+            DataOutput dos = tb.getDataOutput();

+            StructField[] outputFieldRefs = new StructField[size];

+            Object[] outputFields = new Object[size];

+            for (int i = 0; i < outputColumnsOffset.length; i++)

+                if (outputColumnsOffset[i] >= 0)

+                    outputFieldRefs[outputColumnsOffset[i]] = fieldRefs.get(i);

 

-			while (reader.next(key, value)) {

-				// reuse the tuple builder

-				tb.reset();

-				if (parser != null) {

-					Text text = (Text) value;

-					parser.parse(text.getBytes(), 0, text.getLength(), tb);

-				} else {

-					Object row = serDe.deserialize((Writable) value);

-					// write fields to the tuple builder one by one

-					int i = 0;

-					for (StructField fieldRef : fieldRefs) {

-						if (outputColumnsOffset[i] >= 0)

-							outputFields[outputColumnsOffset[i]] = structInspector

-									.getStructFieldData(row, fieldRef);

-						i++;

-					}

+            while (reader.next(key, value)) {

+                // reuse the tuple builder

+                tb.reset();

+                if (parser != null) {

+                    Text text = (Text) value;

+                    parser.parse(text.getBytes(), 0, text.getLength(), tb);

+                } else {

+                    Object row = serDe.deserialize((Writable) value);

+                    // write fields to the tuple builder one by one

+                    int i = 0;

+                    for (StructField fieldRef : fieldRefs) {

+                        if (outputColumnsOffset[i] >= 0)

+                            outputFields[outputColumnsOffset[i]] = structInspector.getStructFieldData(row, fieldRef);

+                        i++;

+                    }

 

-					i = 0;

-					for (Object field : outputFields) {

-						BytesWritable fieldWritable = (BytesWritable) outputSerDe

-								.serialize(field, outputFieldRefs[i]

-										.getFieldObjectInspector());

-						dos.write(fieldWritable.getBytes(), 0,

-								fieldWritable.getSize());

-						tb.addFieldEndOffset();

-						i++;

-					}

-				}

+                    i = 0;

+                    for (Object field : outputFields) {

+                        BytesWritable fieldWritable = (BytesWritable) outputSerDe.serialize(field,

+                                outputFieldRefs[i].getFieldObjectInspector());

+                        dos.write(fieldWritable.getBytes(), 0, fieldWritable.getSize());

+                        tb.addFieldEndOffset();

+                        i++;

+                    }

+                }

 

-				if (!appender.append(tb.getFieldEndOffsets(),

-						tb.getByteArray(), 0, tb.getSize())) {

-					if (appender.getTupleCount() <= 0)

-						throw new IllegalStateException(

-								"zero tuples in a frame!");

-					FrameUtils.flushFrame(frame, writer);

-					appender.reset(frame, true);

-					if (!appender.append(tb.getFieldEndOffsets(),

-							tb.getByteArray(), 0, tb.getSize())) {

-						throw new IllegalStateException();

-					}

-				}

-			}

-			reader.close();

-			System.gc();

+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {

+                    if (appender.getTupleCount() <= 0)

+                        throw new IllegalStateException("zero tuples in a frame!");

+                    FrameUtils.flushFrame(frame, writer);

+                    appender.reset(frame, true);

+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {

+                        throw new IllegalStateException();

+                    }

+                }

+            }

+            reader.close();

+            System.gc();

 

-			// flush the last frame

-			if (appender.getTupleCount() > 0) {

-				FrameUtils.flushFrame(frame, writer);

-			}

-		} catch (IOException e) {

-			throw new HyracksDataException(e);

-		} catch (SerDeException e) {

-			throw new HyracksDataException(e);

-		}

-	}

+            // flush the last frame

+            if (appender.getTupleCount() > 0) {

+                FrameUtils.flushFrame(frame, writer);

+            }

+        } catch (IOException e) {

+            throw new HyracksDataException(e);

+        } catch (SerDeException e) {

+            throw new HyracksDataException(e);

+        }

+    }

 }
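
The frame-packing discipline in parse() — append tuples until a frame fills, flush it downstream, reset, and retry — in a self-contained toy form, with integer records standing in for serialized tuples:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class FramePackingSketch {
        public static void main(String[] args) {
            final int frameSize = 8;
            List<int[]> flushed = new ArrayList<int[]>();
            int[] frame = new int[frameSize];
            int used = 0;

            for (int len : new int[] { 3, 4, 2, 5, 1 }) {
                if (len > frameSize)
                    throw new IllegalStateException("record larger than a frame");
                if (used + len > frameSize) { // frame full: flush and reset
                    flushed.add(Arrays.copyOf(frame, used));
                    used = 0;
                }
                for (int i = 0; i < len; i++)
                    frame[used++] = len; // fake payload
            }
            if (used > 0) // flush the partial last frame
                flushed.add(Arrays.copyOf(frame, used));

            System.out.println(flushed.size() + " frames flushed"); // prints "2 frames flushed"
        }
    }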

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java
index 69aa881..98d730f 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java
@@ -21,91 +21,85 @@
 @SuppressWarnings("deprecation")

 public class HiveTupleParserFactory implements ITupleParserFactory {

 

-	private static final long serialVersionUID = 1L;

+    private static final long serialVersionUID = 1L;

 

-	private int[] outputColumns;

+    private int[] outputColumns;

 

-	private String outputSerDeClass = LazySerDe.class.getName();

+    private String outputSerDeClass = LazySerDe.class.getName();

 

-	private String inputSerDeClass;

+    private String inputSerDeClass;

 

-	private transient JobConf conf;

+    private transient JobConf conf;

 

-	private Properties tbl;

+    private Properties tbl;

 

-	private String confContent;

+    private String confContent;

 

-	private String inputFormatClass;

+    private String inputFormatClass;

 

-	public HiveTupleParserFactory(PartitionDesc desc, JobConf conf,

-			int[] outputColumns) {

-		this.conf = conf;

-		tbl = desc.getProperties();

-		inputFormatClass = (String) tbl.getProperty("file.inputformat");

-		inputSerDeClass = (String) tbl.getProperty("serialization.lib");

-		this.outputColumns = outputColumns;

+    public HiveTupleParserFactory(PartitionDesc desc, JobConf conf, int[] outputColumns) {

+        this.conf = conf;

+        tbl = desc.getProperties();

+        inputFormatClass = tbl.getProperty("file.inputformat");

+        inputSerDeClass = tbl.getProperty("serialization.lib");

+        this.outputColumns = outputColumns;

 

-		writeConfContent();

-	}

+        writeConfContent();

+    }

 

-	@Override

-	public ITupleParser createTupleParser(IHyracksTaskContext ctx) {

-		readConfContent();

-		try {

-			return new HiveTupleParser(inputFormatClass, inputSerDeClass,

-					outputSerDeClass, tbl, conf, ctx, outputColumns);

-		} catch (Exception e) {

-			e.printStackTrace();

-			return null;

-		}

-	}

+    @Override

+    public ITupleParser createTupleParser(IHyracksTaskContext ctx) {

+        readConfContent();

+        try {

+            return new HiveTupleParser(inputFormatClass, inputSerDeClass, outputSerDeClass, tbl, conf, ctx,

+                    outputColumns);

+        } catch (Exception e) {

+            e.printStackTrace();

+            return null;

+        }

+    }

 

-	private void writeConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void writeConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			DataOutputStream out = new DataOutputStream(new FileOutputStream(

-					new File(fileName)));

-			conf.writeXml(out);

-			out.close();

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            DataOutputStream out = new DataOutputStream(new FileOutputStream(new File(fileName)));

+            conf.writeXml(out);

+            out.close();

 

-			DataInputStream in = new DataInputStream(new FileInputStream(

-					fileName));

-			StringBuffer buffer = new StringBuffer();

-			String line;

-			while ((line = in.readLine()) != null) {

-				buffer.append(line + "\n");

-			}

-			in.close();

-			confContent = buffer.toString();

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+            DataInputStream in = new DataInputStream(new FileInputStream(fileName));

+            StringBuffer buffer = new StringBuffer();

+            String line;

+            while ((line = in.readLine()) != null) {

+                buffer.append(line + "\n");

+            }

+            in.close();

+            confContent = buffer.toString();

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

-	private void readConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void readConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			PrintWriter out = new PrintWriter((new OutputStreamWriter(

-					new FileOutputStream(new File(fileName)))));

-			out.write(confContent);

-			out.close();

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            PrintWriter out = new PrintWriter((new OutputStreamWriter(new FileOutputStream(new File(fileName)))));

+            out.write(confContent);

+            out.close();

 

-			conf = new JobConf(fileName);

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+            conf = new JobConf(fileName);

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java
index 1b3dcf2..6742a34 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java
@@ -8,28 +8,28 @@
 

 @SuppressWarnings("deprecation")

 public class Partition implements Serializable {

-	private static final long serialVersionUID = 1L;

+    private static final long serialVersionUID = 1L;

 

-	private String uri;

-	private long offset;

-	private long length;

-	private String[] locations;

+    private String uri;

+    private long offset;

+    private long length;

+    private String[] locations;

 

-	public Partition() {

-	}

+    public Partition() {

+    }

 

-	public Partition(FileSplit file) {

-		uri = file.getPath().toUri().toString();

-		offset = file.getStart();

-		length = file.getLength();

-		try {

-			locations = file.getLocations();

-		} catch (IOException e) {

-			throw new IllegalStateException(e);

-		}

-	}

+    public Partition(FileSplit file) {

+        uri = file.getPath().toUri().toString();

+        offset = file.getStart();

+        length = file.getLength();

+        try {

+            locations = file.getLocations();

+        } catch (IOException e) {

+            throw new IllegalStateException(e);

+        }

+    }

 

-	public FileSplit toFileSplit() {

-		return new FileSplit(new Path(uri), offset, length, locations);

-	}

+    public FileSplit toFileSplit() {

+        return new FileSplit(new Path(uri), offset, length, locations);

+    }

 }
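
Partition exists because org.apache.hadoop.mapred.FileSplit is not java.io.Serializable: the split is unpacked into plain serializable fields on the client and rebuilt on the remote node controller. A sketch of the round-trip (assumes the Hadoop and hivesterix classes are on the classpath; the path and host are hypothetical):

    import java.io.*;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileSplit;

    public class PartitionShippingSketch {
        public static void main(String[] args) throws Exception {
            FileSplit split = new FileSplit(new Path("hdfs://nn/tbl/part-0"), 0L, 64L,
                    new String[] { "10.0.0.1" });

            Partition p = new Partition(split); // wrap on the client
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(bos);
            out.writeObject(p);
            out.close();

            ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
            FileSplit rebuilt = ((Partition) in.readObject()).toFileSplit(); // unwrap on the NC
            System.out.println(rebuilt.getPath() + " @ " + rebuilt.getStart());
        }
    }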

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
index 05e79ea..0d74f0b 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
@@ -24,130 +24,128 @@
 @SuppressWarnings("deprecation")

 public class HiveFileWritePushRuntime implements IPushRuntime {

 

-	/**

-	 * frame tuple accessor to access byte buffer

-	 */

-	private final FrameTupleAccessor accessor;

+    /**

+     * frame tuple accessor to access byte buffer

+     */

+    private final FrameTupleAccessor accessor;

 

-	/**

-	 * input object inspector

-	 */

-	private final ObjectInspector inputInspector;

+    /**

+     * input object inspector

+     */

+    private final ObjectInspector inputInspector;

 

-	/**

-	 * cachedInput

-	 */

-	private final LazyColumnar cachedInput;

+    /**

+     * cachedInput

+     */

+    private final LazyColumnar cachedInput;

 

-	/**

-	 * File sink operator of Hive

-	 */

-	private final FileSinkDesc fileSink;

+    /**

+     * File sink operator of Hive

+     */

+    private final FileSinkDesc fileSink;

 

-	/**

-	 * job configuration, which contain name node and other configuration

-	 * information

-	 */

-	private JobConf conf;

+    /**

+     * job configuration, which contains the name node and other configuration

+     * information

+     */

+    private JobConf conf;

 

-	/**

-	 * input object inspector

-	 */

-	private final Schema inputSchema;

+    /**

+     * input object inspector

+     */

+    private final Schema inputSchema;

 

-	/**

-	 * a copy of hive schema representation

-	 */

-	private RowSchema rowSchema;

+    /**

+     * a copy of hive schema representation

+     */

+    private RowSchema rowSchema;

 

-	/**

-	 * the Hive file sink operator

-	 */

-	private FileSinkOperator fsOp;

+    /**

+     * the Hive file sink operator

+     */

+    private FileSinkOperator fsOp;

 

-	/**

-	 * cached tuple object reference

-	 */

-	private FrameTupleReference tuple = new FrameTupleReference();

+    /**

+     * cached tuple object reference

+     */

+    private FrameTupleReference tuple = new FrameTupleReference();

 

-	/**

-	 * @param spec

-	 * @param fsProvider

-	 */

-	public HiveFileWritePushRuntime(IHyracksTaskContext context,

-			RecordDescriptor inputRecordDesc, JobConf job, FileSinkDesc fs,

-			RowSchema schema, Schema oi) {

-		fileSink = fs;

-		fileSink.setGatherStats(false);

+    /**

+     * @param context

+     * @param inputRecordDesc

+     * @param job

+     * @param fs

+     * @param schema

+     * @param oi

+     */

+    public HiveFileWritePushRuntime(IHyracksTaskContext context, RecordDescriptor inputRecordDesc, JobConf job,

+            FileSinkDesc fs, RowSchema schema, Schema oi) {

+        fileSink = fs;

+        fileSink.setGatherStats(false);

 

-		rowSchema = schema;

-		conf = job;

-		inputSchema = oi;

+        rowSchema = schema;

+        conf = job;

+        inputSchema = oi;

 

-		accessor = new FrameTupleAccessor(context.getFrameSize(),

-				inputRecordDesc);

-		inputInspector = inputSchema.toObjectInspector();

-		cachedInput = new LazyColumnar(

-				(LazyColumnarObjectInspector) inputInspector);

-	}

+        accessor = new FrameTupleAccessor(context.getFrameSize(), inputRecordDesc);

+        inputInspector = inputSchema.toObjectInspector();

+        cachedInput = new LazyColumnar((LazyColumnarObjectInspector) inputInspector);

+    }

 

-	@Override

-	public void open() throws HyracksDataException {

-		fsOp = (FileSinkOperator) OperatorFactory.get(fileSink, rowSchema);

-		fsOp.setChildOperators(null);

-		fsOp.setParentOperators(null);

-		conf.setClassLoader(this.getClass().getClassLoader());

+    @Override

+    public void open() throws HyracksDataException {

+        fsOp = (FileSinkOperator) OperatorFactory.get(fileSink, rowSchema);

+        fsOp.setChildOperators(null);

+        fsOp.setParentOperators(null);

+        conf.setClassLoader(this.getClass().getClassLoader());

 

-		ObjectInspector[] inspectors = new ObjectInspector[1];

-		inspectors[0] = inputInspector;

-		try {

-			fsOp.initialize(conf, inspectors);

-			fsOp.setExecContext(null);

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+        ObjectInspector[] inspectors = new ObjectInspector[1];

+        inspectors[0] = inputInspector;

+        try {

+            fsOp.initialize(conf, inspectors);

+            fsOp.setExecContext(null);

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

-	@Override

-	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {

-		accessor.reset(buffer);

-		int n = accessor.getTupleCount();

-		try {

-			for (int i = 0; i < n; ++i) {

-				tuple.reset(accessor, i);

-				cachedInput.init(tuple);

-				fsOp.process(cachedInput, 0);

-			}

-		} catch (HiveException e) {

-			throw new HyracksDataException(e);

-		}

-	}

+    @Override

+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {

+        accessor.reset(buffer);

+        int n = accessor.getTupleCount();

+        try {

+            for (int i = 0; i < n; ++i) {

+                tuple.reset(accessor, i);

+                cachedInput.init(tuple);

+                fsOp.process(cachedInput, 0);

+            }

+        } catch (HiveException e) {

+            throw new HyracksDataException(e);

+        }

+    }

 

-	@Override

-	public void close() throws HyracksDataException {

-		try {

-			Thread.currentThread().setContextClassLoader(

-					this.getClass().getClassLoader());

-			fsOp.closeOp(false);

-		} catch (HiveException e) {

-			throw new HyracksDataException(e);

-		}

-	}

+    @Override

+    public void close() throws HyracksDataException {

+        try {

+            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());

+            fsOp.closeOp(false);

+        } catch (HiveException e) {

+            throw new HyracksDataException(e);

+        }

+    }

 

-	@Override

-	public void setFrameWriter(int index, IFrameWriter writer,

-			RecordDescriptor recordDesc) {

-		throw new IllegalStateException();

-	}

+    @Override

+    public void setFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {

+        throw new IllegalStateException();

+    }

 

-	@Override

-	public void setInputRecordDescriptor(int index,

-			RecordDescriptor recordDescriptor) {

-	}

+    @Override

+    public void setInputRecordDescriptor(int index, RecordDescriptor recordDescriptor) {

+    }

 

-	@Override

-	public void fail() throws HyracksDataException {

+    @Override

+    public void fail() throws HyracksDataException {

 

-	}

+    }

 

 }
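
The IPushRuntime contract implemented above is driver-invoked in a fixed order; a hypothetical driver loop (not Hyracks' actual scheduler, and with import paths assumed from the surrounding codebase) makes the lifecycle explicit:

    import java.nio.ByteBuffer;
    import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

    public class PushRuntimeDriverSketch {
        // shown only to illustrate the guaranteed call order: open, nextFrame*, close
        static void drive(IPushRuntime runtime, Iterable<ByteBuffer> frames) throws HyracksDataException {
            runtime.open(); // builds and initializes the Hive FileSinkOperator
            try {
                for (ByteBuffer frame : frames)
                    runtime.nextFrame(frame); // deserializes tuples and pushes them into the sink
            } finally {
                runtime.close(); // closeOp() finalizes the output files
            }
        }
    }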

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
index 43e90fa..213bbca 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
@@ -24,90 +24,82 @@
 @SuppressWarnings("deprecation")

 public class HivePushRuntimeFactory implements IPushRuntimeFactory {

 

-	private static final long serialVersionUID = 1L;

+    private static final long serialVersionUID = 1L;

 

-	private final RecordDescriptor inputRecordDesc;

-	private transient JobConf conf;

-	private final FileSinkDesc fileSink;

-	private final RowSchema outSchema;

-	private final Schema schema;

+    private final RecordDescriptor inputRecordDesc;

+    private transient JobConf conf;

+    private final FileSinkDesc fileSink;

+    private final RowSchema outSchema;

+    private final Schema schema;

 

-	/**

-	 * the content of the configuration

-	 */

-	private String confContent;

+    /**

+     * the content of the configuration

+     */

+    private String confContent;

 

-	public HivePushRuntimeFactory(RecordDescriptor inputRecordDesc,

-			JobConf conf, FileSinkOperator fsp, Schema sch) {

-		this.inputRecordDesc = inputRecordDesc;

-		this.conf = conf;

-		this.fileSink = fsp.getConf();

-		outSchema = fsp.getSchema();

-		this.schema = sch;

+    public HivePushRuntimeFactory(RecordDescriptor inputRecordDesc, JobConf conf, FileSinkOperator fsp, Schema sch) {

+        this.inputRecordDesc = inputRecordDesc;

+        this.conf = conf;

+        this.fileSink = fsp.getConf();

+        outSchema = fsp.getSchema();

+        this.schema = sch;

 

-		writeConfContent();

-	}

+        writeConfContent();

+    }

 

-	@Override

-	public String toString() {

-		return "file write";

-	}

+    @Override

+    public String toString() {

+        return "file write";

+    }

 

-	@Override

-	public IPushRuntime createPushRuntime(IHyracksTaskContext context)

-			throws AlgebricksException {

-		if (conf == null)

-			readConfContent();

+    @Override

+    public IPushRuntime createPushRuntime(IHyracksTaskContext context) throws AlgebricksException {

+        if (conf == null)

+            readConfContent();

 

-		return new HiveFileWritePushRuntime(context, inputRecordDesc, conf,

-				fileSink, outSchema, schema);

-	}

+        return new HiveFileWritePushRuntime(context, inputRecordDesc, conf, fileSink, outSchema, schema);

+    }

 

-	private void readConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void readConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			PrintWriter out = new PrintWriter((new OutputStreamWriter(

-					new FileOutputStream(new File(fileName)))));

-			out.write(confContent);

-			out.close();

-			conf = new JobConf(fileName);

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            PrintWriter out = new PrintWriter((new OutputStreamWriter(new FileOutputStream(new File(fileName)))));

+            out.write(confContent);

+            out.close();

+            conf = new JobConf(fileName);

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

-	private void writeConfContent() {

-		File dir = new File("hadoop-conf-tmp");

-		if (!dir.exists()) {

-			dir.mkdir();

-		}

+    private void writeConfContent() {

+        File dir = new File("hadoop-conf-tmp");

+        if (!dir.exists()) {

+            dir.mkdir();

+        }

 

-		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

-				+ System.currentTimeMillis() + ".xml";

-		try {

-			DataOutputStream out = new DataOutputStream(new FileOutputStream(

-					new File(fileName)));

-			conf.writeXml(out);

-			out.close();

+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";

+        try {

+            DataOutputStream out = new DataOutputStream(new FileOutputStream(new File(fileName)));

+            conf.writeXml(out);

+            out.close();

 

-			DataInputStream in = new DataInputStream(new FileInputStream(

-					fileName));

-			StringBuffer buffer = new StringBuffer();

-			String line;

-			while ((line = in.readLine()) != null) {

-				buffer.append(line + "\n");

-			}

-			in.close();

-			confContent = buffer.toString();

-		} catch (Exception e) {

-			e.printStackTrace();

-		}

-	}

+            DataInputStream in = new DataInputStream(new FileInputStream(fileName));

+            StringBuffer buffer = new StringBuffer();

+            String line;

+            while ((line = in.readLine()) != null) {

+                buffer.append(line + "\n");

+            }

+            in.close();

+            confContent = buffer.toString();

+        } catch (Exception e) {

+            e.printStackTrace();

+        }

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
index 5a2e98c..467ec0a 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
@@ -21,57 +21,55 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 
-public class HiveBinaryComparatorFactoryProvider implements
-		IBinaryComparatorFactoryProvider {
+public class HiveBinaryComparatorFactoryProvider implements IBinaryComparatorFactoryProvider {
 
-	public static final HiveBinaryComparatorFactoryProvider INSTANCE = new HiveBinaryComparatorFactoryProvider();
+    public static final HiveBinaryComparatorFactoryProvider INSTANCE = new HiveBinaryComparatorFactoryProvider();
 
-	private HiveBinaryComparatorFactoryProvider() {
-	}
+    private HiveBinaryComparatorFactoryProvider() {
+    }
 
-	@Override
-	public IBinaryComparatorFactory getBinaryComparatorFactory(Object type,
-			boolean ascending) throws AlgebricksException {
-		if (type.equals(TypeInfoFactory.intTypeInfo)) {
-			if (ascending)
-				return HiveIntegerBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveIntegerBinaryDescComparatorFactory.INSTANCE;
+    @Override
+    public IBinaryComparatorFactory getBinaryComparatorFactory(Object type, boolean ascending)
+            throws AlgebricksException {
+        if (type.equals(TypeInfoFactory.intTypeInfo)) {
+            if (ascending)
+                return HiveIntegerBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveIntegerBinaryDescComparatorFactory.INSTANCE;
 
-		} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
-			if (ascending)
-				return HiveLongBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveLongBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+            if (ascending)
+                return HiveLongBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveLongBinaryDescComparatorFactory.INSTANCE;
 
-		} else if (type.equals(TypeInfoFactory.floatTypeInfo)) {
-			if (ascending)
-				return HiveFloatBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveFloatBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.floatTypeInfo)) {
+            if (ascending)
+                return HiveFloatBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveFloatBinaryDescComparatorFactory.INSTANCE;
 
-		} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
-			if (ascending)
-				return HiveDoubleBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveDoubleBinaryDescComparatorFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.shortTypeInfo)) {
-			if (ascending)
-				return HiveShortBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveShortBinaryDescComparatorFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
-			if (ascending)
-				return HiveStringBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveStringBinaryDescComparatorFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.byteTypeInfo)
-				|| type.equals(TypeInfoFactory.booleanTypeInfo)) {
-			if (ascending)
-				return HiveByteBinaryAscComparatorFactory.INSTANCE;
-			else
-				return HiveByteBinaryDescComparatorFactory.INSTANCE;
-		} else
-			throw new NotImplementedException();
-	}
+        } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+            if (ascending)
+                return HiveDoubleBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveDoubleBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.shortTypeInfo)) {
+            if (ascending)
+                return HiveShortBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveShortBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+            if (ascending)
+                return HiveStringBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveStringBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.byteTypeInfo) || type.equals(TypeInfoFactory.booleanTypeInfo)) {
+            if (ascending)
+                return HiveByteBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveByteBinaryDescComparatorFactory.INSTANCE;
+        } else
+            throw new NotImplementedException();
+    }
 }
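[Editor's note] A minimal usage sketch of the provider above — hypothetical driver code, not part of this patch. It assumes the standard Hyracks contract that IBinaryComparatorFactory.createBinaryComparator() yields a comparator with a byte-range compare(b1, s1, l1, b2, s2, l2) method, and that the two ranges hold values serialized in the format the chosen factory expects:

    // Hypothetical helper: compare two serialized int values in ascending order.
    static int compareSerializedInts(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2)
            throws AlgebricksException {
        IBinaryComparatorFactory factory = HiveBinaryComparatorFactoryProvider.INSTANCE
                .getBinaryComparatorFactory(TypeInfoFactory.intTypeInfo, true);
        // negative, zero, or positive, following the usual comparator convention
        return factory.createBinaryComparator().compare(b1, s1, l1, b2, s2, l2);
    }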

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
index 371d45b..473eee1 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
@@ -11,27 +11,25 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 
-public class HiveBinaryHashFunctionFactoryProvider implements
-		IBinaryHashFunctionFactoryProvider {
+public class HiveBinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider {
 
-	public static final HiveBinaryHashFunctionFactoryProvider INSTANCE = new HiveBinaryHashFunctionFactoryProvider();
+    public static final HiveBinaryHashFunctionFactoryProvider INSTANCE = new HiveBinaryHashFunctionFactoryProvider();
 
-	private HiveBinaryHashFunctionFactoryProvider() {
-	}
+    private HiveBinaryHashFunctionFactoryProvider() {
+    }
 
-	@Override
-	public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type)
-			throws AlgebricksException {
-		if (type.equals(TypeInfoFactory.intTypeInfo)) {
-			return HiveIntegerBinaryHashFunctionFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
-			return HiveLongBinaryHashFunctionFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
-			return HiveStingBinaryHashFunctionFactory.INSTANCE;
-		} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
-			return HiveDoubleBinaryHashFunctionFactory.INSTANCE;
-		} else {
-			return HiveRawBinaryHashFunctionFactory.INSTANCE;
-		}
-	}
+    @Override
+    public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type) throws AlgebricksException {
+        if (type.equals(TypeInfoFactory.intTypeInfo)) {
+            return HiveIntegerBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+            return HiveLongBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+            return HiveStingBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+            return HiveDoubleBinaryHashFunctionFactory.INSTANCE;
+        } else {
+            return HiveRawBinaryHashFunctionFactory.INSTANCE;
+        }
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
index 9e3a8ae..91bf3e5 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
@@ -13,41 +13,39 @@
 import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 
-public class HiveNormalizedKeyComputerFactoryProvider implements
-		INormalizedKeyComputerFactoryProvider {
+public class HiveNormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
 
-	public static final HiveNormalizedKeyComputerFactoryProvider INSTANCE = new HiveNormalizedKeyComputerFactoryProvider();
+    public static final HiveNormalizedKeyComputerFactoryProvider INSTANCE = new HiveNormalizedKeyComputerFactoryProvider();
 
-	private HiveNormalizedKeyComputerFactoryProvider() {
-	}
+    private HiveNormalizedKeyComputerFactoryProvider() {
+    }
 
-	@Override
-	public INormalizedKeyComputerFactory getNormalizedKeyComputerFactory(
-			Object type, boolean ascending) {
-		if (ascending) {
-			if (type.equals(TypeInfoFactory.stringTypeInfo)) {
-				return new HiveStringAscNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.intTypeInfo)) {
-				return new HiveIntegerAscNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
-				return new HiveLongAscNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
-				return new HiveDoubleAscNormalizedKeyComputerFactory();
-			} else {
-				return null;
-			}
-		} else {
-			if (type.equals(TypeInfoFactory.stringTypeInfo)) {
-				return new HiveStringDescNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.intTypeInfo)) {
-				return new HiveIntegerDescNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
-				return new HiveLongDescNormalizedKeyComputerFactory();
-			} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
-				return new HiveDoubleDescNormalizedKeyComputerFactory();
-			} else {
-				return null;
-			}
-		}
-	}
+    @Override
+    public INormalizedKeyComputerFactory getNormalizedKeyComputerFactory(Object type, boolean ascending) {
+        if (ascending) {
+            if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+                return new HiveStringAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+                return new HiveIntegerAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+                return new HiveLongAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+                return new HiveDoubleAscNormalizedKeyComputerFactory();
+            } else {
+                return null;
+            }
+        } else {
+            if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+                return new HiveStringDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+                return new HiveIntegerDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+                return new HiveLongDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+                return new HiveDoubleDescNormalizedKeyComputerFactory();
+            } else {
+                return null;
+            }
+        }
+    }
 }
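[Editor's note] Unlike the comparator provider, this provider returns null for types without a normalized-key implementation. A hypothetical caller (not part of this patch) should treat null as "no normalized-key acceleration available" and, presumably, fall back to full binary comparisons:

    INormalizedKeyComputerFactory nkcf = HiveNormalizedKeyComputerFactoryProvider.INSTANCE
            .getNormalizedKeyComputerFactory(TypeInfoFactory.floatTypeInfo, true);
    if (nkcf == null) {
        // float has no normalized-key computer; sort using comparators only
    }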
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
index bebb457..10c84d2 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
@@ -6,12 +6,11 @@
 

 public class HivePrinterFactoryProvider implements IPrinterFactoryProvider {

 

-	public static IPrinterFactoryProvider INSTANCE = new HivePrinterFactoryProvider();

+    public static IPrinterFactoryProvider INSTANCE = new HivePrinterFactoryProvider();

 

-	@Override

-	public IPrinterFactory getPrinterFactory(Object type)

-			throws AlgebricksException {

-		return null;

-	}

+    @Override

+    public IPrinterFactory getPrinterFactory(Object type) throws AlgebricksException {

+        return null;

+    }

 

 }

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
index 7938de8..22f81e0 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
@@ -4,20 +4,18 @@
 import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 
-public class HiveSerializerDeserializerProvider implements
-		ISerializerDeserializerProvider {
+public class HiveSerializerDeserializerProvider implements ISerializerDeserializerProvider {
 
-	public static final HiveSerializerDeserializerProvider INSTANCE = new HiveSerializerDeserializerProvider();
+    public static final HiveSerializerDeserializerProvider INSTANCE = new HiveSerializerDeserializerProvider();
 
-	private HiveSerializerDeserializerProvider() {
-	}
+    private HiveSerializerDeserializerProvider() {
+    }
 
-	@SuppressWarnings("rawtypes")
-	@Override
-	public ISerializerDeserializer getSerializerDeserializer(Object type)
-			throws AlgebricksException {
-		// return ARecordSerializerDeserializer.SCHEMALESS_INSTANCE;
-		return null;
-	}
+    @SuppressWarnings("rawtypes")
+    @Override
+    public ISerializerDeserializer getSerializerDeserializer(Object type) throws AlgebricksException {
+        // return ARecordSerializerDeserializer.SCHEMALESS_INSTANCE;
+        return null;
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
index 2059128..be4b149 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
@@ -6,28 +6,28 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 
 public class HiveTypeTraitProvider implements ITypeTraitProvider, Serializable {
-	private static final long serialVersionUID = 1L;
-	public static HiveTypeTraitProvider INSTANCE = new HiveTypeTraitProvider();
+    private static final long serialVersionUID = 1L;
+    public static HiveTypeTraitProvider INSTANCE = new HiveTypeTraitProvider();
 
-	private HiveTypeTraitProvider() {
+    private HiveTypeTraitProvider() {
 
-	}
+    }
 
-	@Override
-	public ITypeTraits getTypeTrait(Object arg0) {
-		return new ITypeTraits() {
-			private static final long serialVersionUID = 1L;
+    @Override
+    public ITypeTraits getTypeTrait(Object arg0) {
+        return new ITypeTraits() {
+            private static final long serialVersionUID = 1L;
 
-			@Override
-			public int getFixedLength() {
-				return -1;
-			}
+            @Override
+            public int getFixedLength() {
+                return -1;
+            }
 
-			@Override
-			public boolean isFixedLength() {
-				return false;
-			}
+            @Override
+            public boolean isFixedLength() {
+                return false;
+            }
 
-		};
-	}
+        };
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
index 673416d..92415f9 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
@@ -19,24 +19,23 @@
 
 /**
  * ByteArrayRef stores a reference to a byte array.
- * 
  * The LazyObject hierarchy uses a reference to a single ByteArrayRef, so that
  * it's much faster to switch to the next row and release the reference to the
  * old row (so that the system can do garbage collection if needed).
  */
 public class ByteArrayRef {
 
-	/**
-	 * Stores the actual data.
-	 */
-	byte[] data;
+    /**
+     * Stores the actual data.
+     */
+    byte[] data;
 
-	public byte[] getData() {
-		return data;
-	}
+    public byte[] getData() {
+        return data;
+    }
 
-	public void setData(byte[] data) {
-		this.data = data;
-	}
+    public void setData(byte[] data) {
+        this.data = data;
+    }
 
 }
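[Editor's note] The class comment above describes the row-switching trick: rows are advanced by swapping the referenced byte array rather than rebuilding the lazy object tree. A minimal sketch of that pattern (hypothetical buffers, not part of this patch):

    ByteArrayRef ref = new ByteArrayRef();
    byte[] rowOne = {1, 2, 3};
    byte[] rowTwo = {4, 5, 6};
    ref.setData(rowOne);   // lazy objects parse out of this buffer
    // ... read the fields of row one ...
    ref.setData(rowTwo);   // switching rows is just a reference swap; the old
                           // buffer becomes eligible for garbage collection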
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
index 821c03d..33b20bf 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
@@ -31,206 +31,199 @@
 /**
  * LazyArray is serialized as follows: start A b b b b b b end bytes[] ->
  * |--------|---|---|---|---| ... |---|---|
- * 
  * Section A is the null-bytes. Suppose the list has N elements, then there are
  * (N+7)/8 bytes used as null-bytes. Each bit corresponds to an element and it
  * indicates whether that element is null (0) or not null (1).
- * 
  * After A, all b(s) represent the elements of the list. Each of them is again a
  * LazyObject.
- * 
  */
 
 public class LazyArray extends LazyNonPrimitive<LazyListObjectInspector> {
 
-	/**
-	 * Whether the data is already parsed or not.
-	 */
-	boolean parsed = false;
-	/**
-	 * The length of the array. Only valid when the data is parsed.
-	 */
-	int arraySize = 0;
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed = false;
+    /**
+     * The length of the array. Only valid when the data is parsed.
+     */
+    int arraySize = 0;
 
-	/**
-	 * The start positions and lengths of array elements. Only valid when the
-	 * data is parsed.
-	 */
-	int[] elementStart;
-	int[] elementLength;
+    /**
+     * The start positions and lengths of array elements. Only valid when the
+     * data is parsed.
+     */
+    int[] elementStart;
+    int[] elementLength;
 
-	/**
-	 * Whether an element is initialized or not.
-	 */
-	boolean[] elementInited;
+    /**
+     * Whether an element is initialized or not.
+     */
+    boolean[] elementInited;
 
-	/**
-	 * Whether an element is null or not. Because length is 0 does not means the
-	 * field is null. In particular, a 0-length string is not null.
-	 */
-	boolean[] elementIsNull;
+    /**
+     * Whether an element is null or not. A length of 0 does not mean that the
+     * field is null. In particular, a 0-length string is not null.
+     */
+    boolean[] elementIsNull;
 
-	/**
-	 * The elements of the array. Note that we call arrayElements[i].init(bytes,
-	 * begin, length) only when that element is accessed.
-	 */
-	@SuppressWarnings("rawtypes")
-	LazyObject[] arrayElements;
+    /**
+     * The elements of the array. Note that we call arrayElements[i].init(bytes,
+     * begin, length) only when that element is accessed.
+     */
+    @SuppressWarnings("rawtypes")
+    LazyObject[] arrayElements;
 
-	/**
-	 * Construct a LazyArray object with the ObjectInspector.
-	 * 
-	 * @param oi
-	 *            the oi representing the type of this LazyArray
-	 */
-	protected LazyArray(LazyListObjectInspector oi) {
-		super(oi);
-	}
+    /**
+     * Construct a LazyArray object with the ObjectInspector.
+     * 
+     * @param oi
+     *            the oi representing the type of this LazyArray
+     */
+    protected LazyArray(LazyListObjectInspector oi) {
+        super(oi);
+    }
 
-	/**
-	 * Set the row data for this LazyArray.
-	 * 
-	 * @see LazyObject#init(ByteArrayRef, int, int)
-	 */
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		super.init(bytes, start, length);
-		parsed = false;
-	}
+    /**
+     * Set the row data for this LazyArray.
+     * 
+     * @see LazyObject#init(ByteArrayRef, int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
 
-	/**
-	 * Enlarge the size of arrays storing information for the elements inside
-	 * the array.
-	 */
-	private void adjustArraySize(int newSize) {
-		if (elementStart == null || elementStart.length < newSize) {
-			elementStart = new int[newSize];
-			elementLength = new int[newSize];
-			elementInited = new boolean[newSize];
-			elementIsNull = new boolean[newSize];
-			arrayElements = new LazyObject[newSize];
-		}
-	}
+    /**
+     * Enlarge the size of arrays storing information for the elements inside
+     * the array.
+     */
+    private void adjustArraySize(int newSize) {
+        if (elementStart == null || elementStart.length < newSize) {
+            elementStart = new int[newSize];
+            elementLength = new int[newSize];
+            elementInited = new boolean[newSize];
+            elementIsNull = new boolean[newSize];
+            arrayElements = new LazyObject[newSize];
+        }
+    }
 
-	VInt vInt = new LazyUtils.VInt();
-	RecordInfo recordInfo = new LazyUtils.RecordInfo();
+    VInt vInt = new LazyUtils.VInt();
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
 
-	/**
-	 * Parse the bytes and fill elementStart, elementLength, elementInited and
-	 * elementIsNull.
-	 */
-	private void parse() {
+    /**
+     * Parse the bytes and fill elementStart, elementLength, elementInited and
+     * elementIsNull.
+     */
+    private void parse() {
 
-		// get the vlong that represents the map size
-		LazyUtils.readVInt(bytes, start, vInt);
-		arraySize = vInt.value;
-		if (0 == arraySize) {
-			parsed = true;
-			return;
-		}
+        // get the vint that represents the array size
+        LazyUtils.readVInt(bytes, start, vInt);
+        arraySize = vInt.value;
+        if (0 == arraySize) {
+            parsed = true;
+            return;
+        }
 
-		// adjust arrays
-		adjustArraySize(arraySize);
-		// find out the null-bytes
-		int arryByteStart = start + vInt.length;
-		int nullByteCur = arryByteStart;
-		int nullByteEnd = arryByteStart + (arraySize + 7) / 8;
-		// the begin the real elements
-		int lastElementByteEnd = nullByteEnd;
-		// the list element object inspector
-		ObjectInspector listEleObjectInspector = ((ListObjectInspector) oi)
-				.getListElementObjectInspector();
-		// parsing elements one by one
-		for (int i = 0; i < arraySize; i++) {
-			elementIsNull[i] = true;
-			if ((bytes[nullByteCur] & (1 << (i % 8))) != 0) {
-				elementIsNull[i] = false;
-				LazyUtils.checkObjectByteInfo(listEleObjectInspector, bytes,
-						lastElementByteEnd, recordInfo);
-				elementStart[i] = lastElementByteEnd + recordInfo.elementOffset;
-				elementLength[i] = recordInfo.elementSize;
-				lastElementByteEnd = elementStart[i] + elementLength[i];
-			}
-			// move onto the next null byte
-			if (7 == (i % 8)) {
-				nullByteCur++;
-			}
-		}
+        // adjust arrays
+        adjustArraySize(arraySize);
+        // find out the null-bytes
+        int arryByteStart = start + vInt.length;
+        int nullByteCur = arryByteStart;
+        int nullByteEnd = arryByteStart + (arraySize + 7) / 8;
+        // the beginning of the real elements
+        int lastElementByteEnd = nullByteEnd;
+        // the list element object inspector
+        ObjectInspector listEleObjectInspector = ((ListObjectInspector) oi).getListElementObjectInspector();
+        // parsing elements one by one
+        for (int i = 0; i < arraySize; i++) {
+            elementIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << (i % 8))) != 0) {
+                elementIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(listEleObjectInspector, bytes, lastElementByteEnd, recordInfo);
+                elementStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                elementLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = elementStart[i] + elementLength[i];
+            }
+            // move onto the next null byte
+            if (7 == (i % 8)) {
+                nullByteCur++;
+            }
+        }
 
-		Arrays.fill(elementInited, 0, arraySize, false);
-		parsed = true;
-	}
+        Arrays.fill(elementInited, 0, arraySize, false);
+        parsed = true;
+    }
 
-	/**
-	 * Returns the actual primitive object at the index position inside the
-	 * array represented by this LazyObject.
-	 */
-	public Object getListElementObject(int index) {
-		if (!parsed) {
-			parse();
-		}
-		if (index < 0 || index >= arraySize) {
-			return null;
-		}
-		return uncheckedGetElement(index);
-	}
+    /**
+     * Returns the actual primitive object at the index position inside the
+     * array represented by this LazyObject.
+     */
+    public Object getListElementObject(int index) {
+        if (!parsed) {
+            parse();
+        }
+        if (index < 0 || index >= arraySize) {
+            return null;
+        }
+        return uncheckedGetElement(index);
+    }
 
-	/**
-	 * Get the element without checking out-of-bound index.
-	 * 
-	 * @param index
-	 *            index to the array element
-	 */
-	private Object uncheckedGetElement(int index) {
+    /**
+     * Get the element without checking out-of-bound index.
+     * 
+     * @param index
+     *            index to the array element
+     */
+    private Object uncheckedGetElement(int index) {
 
-		if (elementIsNull[index]) {
-			return null;
-		} else {
-			if (!elementInited[index]) {
-				elementInited[index] = true;
-				if (arrayElements[index] == null) {
-					arrayElements[index] = LazyFactory.createLazyObject((oi)
-							.getListElementObjectInspector());
-				}
-				arrayElements[index].init(bytes, elementStart[index],
-						elementLength[index]);
-			}
-		}
-		return arrayElements[index].getObject();
-	}
+        if (elementIsNull[index]) {
+            return null;
+        } else {
+            if (!elementInited[index]) {
+                elementInited[index] = true;
+                if (arrayElements[index] == null) {
+                    arrayElements[index] = LazyFactory.createLazyObject((oi).getListElementObjectInspector());
+                }
+                arrayElements[index].init(bytes, elementStart[index], elementLength[index]);
+            }
+        }
+        return arrayElements[index].getObject();
+    }
 
-	/**
-	 * Returns the array size.
-	 */
-	public int getListLength() {
-		if (!parsed) {
-			parse();
-		}
-		return arraySize;
-	}
+    /**
+     * Returns the array size.
+     */
+    public int getListLength() {
+        if (!parsed) {
+            parse();
+        }
+        return arraySize;
+    }
 
-	/**
-	 * cachedList is reused every time getList is called. Different
-	 * LazyBianryArray instances cannot share the same cachedList.
-	 */
-	ArrayList<Object> cachedList;
+    /**
+     * cachedList is reused every time getList is called. Different
+     * LazyArray instances cannot share the same cachedList.
+     */
+    ArrayList<Object> cachedList;
 
-	/**
-	 * Returns the List of actual primitive objects. Returns null for null
-	 * array.
-	 */
-	public List<Object> getList() {
-		if (!parsed) {
-			parse();
-		}
-		if (cachedList == null) {
-			cachedList = new ArrayList<Object>(arraySize);
-		} else {
-			cachedList.clear();
-		}
-		for (int index = 0; index < arraySize; index++) {
-			cachedList.add(uncheckedGetElement(index));
-		}
-		return cachedList;
-	}
+    /**
+     * Returns the List of actual primitive objects. Returns null for null
+     * array.
+     */
+    public List<Object> getList() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>(arraySize);
+        } else {
+            cachedList.clear();
+        }
+        for (int index = 0; index < arraySize; index++) {
+            cachedList.add(uncheckedGetElement(index));
+        }
+        return cachedList;
+    }
 }
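[Editor's note] A small worked example of the null-byte layout documented above (hypothetical, not part of this patch). For N elements, (N+7)/8 null-bytes precede the element data, and bit (i % 8) of null-byte (i / 8) records whether element i is non-null — the same arithmetic parse() uses:

    // For N = 10 elements, (10 + 7) / 8 = 2 null-bytes precede the element data.
    static boolean isElementPresent(byte[] nullBytes, int i) {
        return (nullBytes[i / 8] & (1 << (i % 8))) != 0; // 1 = not null, 0 = null
    }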
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
index 83b6254..5a48525 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
@@ -23,46 +23,38 @@
 
 /**
  * LazyObject for storing a value of boolean.
- * 
  * <p>
- * Part of the code is adapted from Apache Harmony Project.
- * 
- * As with the specification, this implementation relied on code laid out in <a
- * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
- * Delight, (Addison Wesley, 2002)</a> as well as <a
- * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
  * </p>
- * 
  */
-public class LazyBoolean extends
-		LazyPrimitive<LazyBooleanObjectInspector, BooleanWritable> {
+public class LazyBoolean extends LazyPrimitive<LazyBooleanObjectInspector, BooleanWritable> {
 
-	public LazyBoolean(LazyBooleanObjectInspector oi) {
-		super(oi);
-		data = new BooleanWritable();
-	}
+    public LazyBoolean(LazyBooleanObjectInspector oi) {
+        super(oi);
+        data = new BooleanWritable();
+    }
 
-	public LazyBoolean(LazyBoolean copy) {
-		super(copy);
-		data = new BooleanWritable(copy.data.get());
-	}
+    public LazyBoolean(LazyBoolean copy) {
+        super(copy);
+        data = new BooleanWritable(copy.data.get());
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
 
-		// a temporal hack
-		assert (1 == length);
-		byte val = bytes[start];
-		if (val == 0) {
-			data.set(false);
-		} else if (val == 1) {
-			data.set(true);
-		}
-	}
+        // a temporary hack
+        assert (1 == length);
+        byte val = bytes[start];
+        if (val == 0) {
+            data.set(false);
+        } else if (val == 1) {
+            data.set(true);
+        }
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
index 264015b..bf4ff04 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
@@ -23,40 +23,32 @@
 
 /**
  * LazyObject for storing a value of Byte.
- * 
  * <p>
- * Part of the code is adapted from Apache Harmony Project.
- * 
- * As with the specification, this implementation relied on code laid out in <a
- * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
- * Delight, (Addison Wesley, 2002)</a> as well as <a
- * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
  * </p>
- * 
  */
-public class LazyByte extends
-		LazyPrimitive<LazyByteObjectInspector, ByteWritable> {
+public class LazyByte extends LazyPrimitive<LazyByteObjectInspector, ByteWritable> {
 
-	public LazyByte(LazyByteObjectInspector oi) {
-		super(oi);
-		data = new ByteWritable();
-	}
+    public LazyByte(LazyByteObjectInspector oi) {
+        super(oi);
+        data = new ByteWritable();
+    }
 
-	public LazyByte(LazyByte copy) {
-		super(copy);
-		data = new ByteWritable(copy.data.get());
-	}
+    public LazyByte(LazyByte copy) {
+        super(copy);
+        data = new ByteWritable(copy.data.get());
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
 
-		assert (1 == length);
-		data.set(bytes[start]);
-	}
+        assert (1 == length);
+        data.set(bytes[start]);
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
index a25ae49..d73fea7 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
@@ -30,182 +30,178 @@
 /**
  * LazyObject for storing a struct. The field of a struct can be primitive or
  * non-primitive.
- * 
  * LazyStruct does not deal with the case of a NULL struct. That is handled by
  * the parent LazyObject.
  */
 @SuppressWarnings("rawtypes")
 public class LazyColumnar extends LazyNonPrimitive<LazyColumnarObjectInspector> {
 
-	/**
-	 * IFrameTupleReference: the backend of the struct
-	 */
-	IFrameTupleReference tuple;
+    /**
+     * IFrameTupleReference: the backend of the struct
+     */
+    IFrameTupleReference tuple;
 
-	/**
-	 * Whether the data is already parsed or not.
-	 */
-	boolean reset;
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean reset;
 
-	/**
-	 * The fields of the struct.
-	 */
-	LazyObject[] fields;
+    /**
+     * The fields of the struct.
+     */
+    LazyObject[] fields;
 
-	/**
-	 * Whether init() has been called on the field or not.
-	 */
-	boolean[] fieldVisited;
+    /**
+     * Whether init() has been called on the field or not.
+     */
+    boolean[] fieldVisited;
 
-	/**
-	 * whether it is the first time initialization
-	 */
-	boolean start = true;
+    /**
+     * whether this is the first-time initialization
+     */
+    boolean start = true;
 
-	/**
-	 * Construct a LazyStruct object with the ObjectInspector.
-	 */
-	public LazyColumnar(LazyColumnarObjectInspector oi) {
-		super(oi);
-	}
+    /**
+     * Construct a LazyStruct object with the ObjectInspector.
+     */
+    public LazyColumnar(LazyColumnarObjectInspector oi) {
+        super(oi);
+    }
 
-	/**
-	 * Set the row data for this LazyStruct.
-	 * 
-	 * @see LazyObject#init(ByteArrayRef, int, int)
-	 */
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		super.init(bytes, start, length);
-		reset = false;
-	}
+    /**
+     * Set the row data for this LazyStruct.
+     * 
+     * @see LazyObject#init(ByteArrayRef, int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        reset = false;
+    }
 
-	/**
-	 * Parse the byte[] and fill each field.
-	 */
-	private void parse() {
+    /**
+     * Parse the byte[] and fill each field.
+     */
+    private void parse() {
 
-		if (start) {
-			// initialize field array and reusable objects
-			List<? extends StructField> fieldRefs = ((StructObjectInspector) oi)
-					.getAllStructFieldRefs();
+        if (start) {
+            // initialize field array and reusable objects
+            List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
 
-			fields = new LazyObject[fieldRefs.size()];
-			for (int i = 0; i < fields.length; i++) {
-				fields[i] = LazyFactory.createLazyObject(fieldRefs.get(i)
-						.getFieldObjectInspector());
-			}
-			fieldVisited = new boolean[fields.length];
-			start = false;
-		}
+            fields = new LazyObject[fieldRefs.size()];
+            for (int i = 0; i < fields.length; i++) {
+                fields[i] = LazyFactory.createLazyObject(fieldRefs.get(i).getFieldObjectInspector());
+            }
+            fieldVisited = new boolean[fields.length];
+            start = false;
+        }
 
-		Arrays.fill(fieldVisited, false);
-		reset = true;
-	}
+        Arrays.fill(fieldVisited, false);
+        reset = true;
+    }
 
-	/**
-	 * Get one field out of the struct.
-	 * 
-	 * If the field is a primitive field, return the actual object. Otherwise
-	 * return the LazyObject. This is because PrimitiveObjectInspector does not
-	 * have control over the object used by the user - the user simply directly
-	 * use the Object instead of going through Object
-	 * PrimitiveObjectInspector.get(Object).
-	 * 
-	 * @param fieldID
-	 *            The field ID
-	 * @return The field as a LazyObject
-	 */
-	public Object getField(int fieldID) {
-		if (!reset) {
-			parse();
-		}
-		return uncheckedGetField(fieldID);
-	}
+    /**
+     * Get one field out of the struct.
+     * If the field is a primitive field, return the actual object. Otherwise
+     * return the LazyObject. This is because PrimitiveObjectInspector does not
+     * have control over the object used by the user - the user simply uses the
+     * Object directly instead of going through
+     * PrimitiveObjectInspector.get(Object).
+     * 
+     * @param fieldID
+     *            The field ID
+     * @return The field as a LazyObject
+     */
+    public Object getField(int fieldID) {
+        if (!reset) {
+            parse();
+        }
+        return uncheckedGetField(fieldID);
+    }
 
-	/**
-	 * Get the field out of the row without checking parsed. This is called by
-	 * both getField and getFieldsAsList.
-	 * 
-	 * @param fieldID
-	 *            The id of the field starting from 0.
-	 * @param nullSequence
-	 *            The sequence representing NULL value.
-	 * @return The value of the field
-	 */
-	private Object uncheckedGetField(int fieldID) {
-		// get the buffer
-		byte[] buffer = tuple.getFieldData(fieldID);
-		// get the offset of the field
-		int s1 = tuple.getFieldStart(fieldID);
-		int l1 = tuple.getFieldLength(fieldID);
+    /**
+     * Get the field out of the row without checking parsed. This is called by
+     * both getField and getFieldsAsList.
+     * 
+     * @param fieldID
+     *            The id of the field starting from 0.
+     * @param nullSequence
+     *            The sequence representing NULL value.
+     * @return The value of the field
+     */
+    private Object uncheckedGetField(int fieldID) {
+        // get the buffer
+        byte[] buffer = tuple.getFieldData(fieldID);
+        // get the offset of the field
+        int s1 = tuple.getFieldStart(fieldID);
+        int l1 = tuple.getFieldLength(fieldID);
 
-		if (!fieldVisited[fieldID]) {
-			fieldVisited[fieldID] = true;
-			fields[fieldID].init(buffer, s1, l1);
-		}
-		// if (fields[fieldID].getObject() == null) {
-		// throw new IllegalStateException("illegal field " + fieldID);
-		// }
-		return fields[fieldID].getObject();
-	}
+        if (!fieldVisited[fieldID]) {
+            fieldVisited[fieldID] = true;
+            fields[fieldID].init(buffer, s1, l1);
+        }
+        // if (fields[fieldID].getObject() == null) {
+        // throw new IllegalStateException("illegal field " + fieldID);
+        // }
+        return fields[fieldID].getObject();
+    }
 
-	ArrayList<Object> cachedList;
+    ArrayList<Object> cachedList;
 
-	/**
-	 * Get the values of the fields as an ArrayList.
-	 * 
-	 * @return The values of the fields as an ArrayList.
-	 */
-	public ArrayList<Object> getFieldsAsList() {
-		if (!reset) {
-			parse();
-		}
-		if (cachedList == null) {
-			cachedList = new ArrayList<Object>();
-		} else {
-			cachedList.clear();
-		}
-		for (int i = 0; i < fields.length; i++) {
-			cachedList.add(uncheckedGetField(i));
-		}
-		return cachedList;
-	}
+    /**
+     * Get the values of the fields as an ArrayList.
+     * 
+     * @return The values of the fields as an ArrayList.
+     */
+    public ArrayList<Object> getFieldsAsList() {
+        if (!reset) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>();
+        } else {
+            cachedList.clear();
+        }
+        for (int i = 0; i < fields.length; i++) {
+            cachedList.add(uncheckedGetField(i));
+        }
+        return cachedList;
+    }
 
-	@Override
-	public Object getObject() {
-		return this;
-	}
+    @Override
+    public Object getObject() {
+        return this;
+    }
 
-	protected boolean getParsed() {
-		return reset;
-	}
+    protected boolean getParsed() {
+        return reset;
+    }
 
-	protected void setParsed(boolean parsed) {
-		this.reset = parsed;
-	}
+    protected void setParsed(boolean parsed) {
+        this.reset = parsed;
+    }
 
-	protected LazyObject[] getFields() {
-		return fields;
-	}
+    protected LazyObject[] getFields() {
+        return fields;
+    }
 
-	protected void setFields(LazyObject[] fields) {
-		this.fields = fields;
-	}
+    protected void setFields(LazyObject[] fields) {
+        this.fields = fields;
+    }
 
-	protected boolean[] getFieldInited() {
-		return fieldVisited;
-	}
+    protected boolean[] getFieldInited() {
+        return fieldVisited;
+    }
 
-	protected void setFieldInited(boolean[] fieldInited) {
-		this.fieldVisited = fieldInited;
-	}
+    protected void setFieldInited(boolean[] fieldInited) {
+        this.fieldVisited = fieldInited;
+    }
 
-	/**
-	 * rebind a frametuplereference to the struct
-	 */
-	public void init(IFrameTupleReference r) {
-		this.tuple = r;
-		reset = false;
-	}
+    /**
+     * rebind an IFrameTupleReference to the struct
+     */
+    public void init(IFrameTupleReference r) {
+        this.tuple = r;
+        reset = false;
+    }
 }
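[Editor's note] A minimal sketch of how the class above is driven (hypothetical, not part of this patch): a LazyColumnar is rebound to each incoming Hyracks frame tuple, and a field is only materialized when it is first accessed:

    static Object firstField(LazyColumnarObjectInspector inspector, IFrameTupleReference tuple) {
        LazyColumnar row = new LazyColumnar(inspector);
        row.init(tuple);        // rebind the frame tuple; parsing is deferred
        return row.getField(0); // field 0 is materialized on first access
    }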
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
index d687aa1..1b2cc5a 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
@@ -23,31 +23,28 @@
 
 /**
  * LazyObject for storing a value of Double.
- * 
  */
-public class LazyDouble extends
-		LazyPrimitive<LazyDoubleObjectInspector, DoubleWritable> {
+public class LazyDouble extends LazyPrimitive<LazyDoubleObjectInspector, DoubleWritable> {
 
-	public LazyDouble(LazyDoubleObjectInspector oi) {
-		super(oi);
-		data = new DoubleWritable();
-	}
+    public LazyDouble(LazyDoubleObjectInspector oi) {
+        super(oi);
+        data = new DoubleWritable();
+    }
 
-	public LazyDouble(LazyDouble copy) {
-		super(copy);
-		data = new DoubleWritable(copy.data.get());
-	}
+    public LazyDouble(LazyDouble copy) {
+        super(copy);
+        data = new DoubleWritable(copy.data.get());
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
-		assert (8 == length);
-		data.set(Double.longBitsToDouble(LazyUtils
-				.byteArrayToLong(bytes, start)));
-	}
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
+        assert (8 == length);
+        data.set(Double.longBitsToDouble(LazyUtils.byteArrayToLong(bytes, start)));
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
index e7593e4..7caa9ed 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
@@ -36,62 +36,59 @@
 
 /**
  * LazyFactory.
- * 
  */
 public final class LazyFactory {
 
-	/**
-	 * Create a lazy binary primitive class given the type name.
-	 */
-	public static LazyPrimitive<?, ?> createLazyPrimitiveClass(
-			PrimitiveObjectInspector oi) {
-		PrimitiveCategory p = oi.getPrimitiveCategory();
-		switch (p) {
-		case BOOLEAN:
-			return new LazyBoolean((LazyBooleanObjectInspector) oi);
-		case BYTE:
-			return new LazyByte((LazyByteObjectInspector) oi);
-		case SHORT:
-			return new LazyShort((LazyShortObjectInspector) oi);
-		case INT:
-			return new LazyInteger((LazyIntObjectInspector) oi);
-		case LONG:
-			return new LazyLong((LazyLongObjectInspector) oi);
-		case FLOAT:
-			return new LazyFloat((LazyFloatObjectInspector) oi);
-		case DOUBLE:
-			return new LazyDouble((LazyDoubleObjectInspector) oi);
-		case STRING:
-			return new LazyString((LazyStringObjectInspector) oi);
-		default:
-			throw new RuntimeException("Internal error: no LazyObject for " + p);
-		}
-	}
+    /**
+     * Create a lazy binary primitive class given the type name.
+     */
+    public static LazyPrimitive<?, ?> createLazyPrimitiveClass(PrimitiveObjectInspector oi) {
+        PrimitiveCategory p = oi.getPrimitiveCategory();
+        switch (p) {
+            case BOOLEAN:
+                return new LazyBoolean((LazyBooleanObjectInspector) oi);
+            case BYTE:
+                return new LazyByte((LazyByteObjectInspector) oi);
+            case SHORT:
+                return new LazyShort((LazyShortObjectInspector) oi);
+            case INT:
+                return new LazyInteger((LazyIntObjectInspector) oi);
+            case LONG:
+                return new LazyLong((LazyLongObjectInspector) oi);
+            case FLOAT:
+                return new LazyFloat((LazyFloatObjectInspector) oi);
+            case DOUBLE:
+                return new LazyDouble((LazyDoubleObjectInspector) oi);
+            case STRING:
+                return new LazyString((LazyStringObjectInspector) oi);
+            default:
+                throw new RuntimeException("Internal error: no LazyObject for " + p);
+        }
+    }
 
-	/**
-	 * Create a hierarchical LazyObject based on the given typeInfo.
-	 */
-	public static LazyObject<? extends ObjectInspector> createLazyObject(
-			ObjectInspector oi) {
-		ObjectInspector.Category c = oi.getCategory();
-		switch (c) {
-		case PRIMITIVE:
-			return createLazyPrimitiveClass((PrimitiveObjectInspector) oi);
-		case MAP:
-			return new LazyMap((LazyMapObjectInspector) oi);
-		case LIST:
-			return new LazyArray((LazyListObjectInspector) oi);
-		case STRUCT: // check whether it is a top-level struct
-			if (oi instanceof LazyStructObjectInspector)
-				return new LazyStruct((LazyStructObjectInspector) oi);
-			else
-				return new LazyColumnar((LazyColumnarObjectInspector) oi);
-		default:
-			throw new RuntimeException("Hive LazySerDe Internal error.");
-		}
-	}
+    /**
+     * Create a hierarchical LazyObject based on the given typeInfo.
+     */
+    public static LazyObject<? extends ObjectInspector> createLazyObject(ObjectInspector oi) {
+        ObjectInspector.Category c = oi.getCategory();
+        switch (c) {
+            case PRIMITIVE:
+                return createLazyPrimitiveClass((PrimitiveObjectInspector) oi);
+            case MAP:
+                return new LazyMap((LazyMapObjectInspector) oi);
+            case LIST:
+                return new LazyArray((LazyListObjectInspector) oi);
+            case STRUCT: // check whether it is a top-level struct
+                if (oi instanceof LazyStructObjectInspector)
+                    return new LazyStruct((LazyStructObjectInspector) oi);
+                else
+                    return new LazyColumnar((LazyColumnarObjectInspector) oi);
+            default:
+                throw new RuntimeException("Hive LazySerDe Internal error.");
+        }
+    }
 
-	private LazyFactory() {
-		// prevent instantiation
-	}
+    private LazyFactory() {
+        // prevent instantiation
+    }
 }
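[Editor's note] A short sketch of the dispatch above (hypothetical, not part of this patch): given any ObjectInspector, createLazyObject picks the matching lazy wrapper, and init() binds it to a byte range without copying:

    static Object decode(ObjectInspector oi, byte[] bytes, int start, int length) {
        LazyObject<? extends ObjectInspector> lazy = LazyFactory.createLazyObject(oi);
        lazy.init(bytes, start, length); // no copy; the bytes are parsed on demand
        return lazy.getObject();
    }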
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
index 303cc67..430ac2e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
@@ -23,31 +23,29 @@
 
 /**
  * LazyObject for storing a value of Double.
- * 
  */
-public class LazyFloat extends
-		LazyPrimitive<LazyFloatObjectInspector, FloatWritable> {
+public class LazyFloat extends LazyPrimitive<LazyFloatObjectInspector, FloatWritable> {
 
-	public LazyFloat(LazyFloatObjectInspector oi) {
-		super(oi);
-		data = new FloatWritable();
-	}
+    public LazyFloat(LazyFloatObjectInspector oi) {
+        super(oi);
+        data = new FloatWritable();
+    }
 
-	public LazyFloat(LazyFloat copy) {
-		super(copy);
-		data = new FloatWritable(copy.data.get());
-	}
+    public LazyFloat(LazyFloat copy) {
+        super(copy);
+        data = new FloatWritable(copy.data.get());
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
 
-		assert (4 == length);
-		data.set(Float.intBitsToFloat(LazyUtils.byteArrayToInt(bytes, start)));
-	}
+        assert (4 == length);
+        data.set(Float.intBitsToFloat(LazyUtils.byteArrayToInt(bytes, start)));
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
index c908c40..0765c4f 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
@@ -24,49 +24,40 @@
 
 /**
  * LazyObject for storing a value of Integer.
- * 
  * <p>
- * Part of the code is adapted from Apache Harmony Project.
- * 
- * As with the specification, this implementation relied on code laid out in <a
- * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
- * Delight, (Addison Wesley, 2002)</a> as well as <a
- * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
  * </p>
- * 
  */
-public class LazyInteger extends
-		LazyPrimitive<LazyIntObjectInspector, IntWritable> {
+public class LazyInteger extends LazyPrimitive<LazyIntObjectInspector, IntWritable> {
 
-	public LazyInteger(LazyIntObjectInspector oi) {
-		super(oi);
-		data = new IntWritable();
-	}
+    public LazyInteger(LazyIntObjectInspector oi) {
+        super(oi);
+        data = new IntWritable();
+    }
 
-	public LazyInteger(LazyInteger copy) {
-		super(copy);
-		data = new IntWritable(copy.data.get());
-	}
+    public LazyInteger(LazyInteger copy) {
+        super(copy);
+        data = new IntWritable(copy.data.get());
+    }
 
-	/**
-	 * The reusable vInt for decoding the integer.
-	 */
-	VInt vInt = new LazyUtils.VInt();
+    /**
+     * The reusable vInt for decoding the integer.
+     */
+    VInt vInt = new LazyUtils.VInt();
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
 
-		LazyUtils.readVInt(bytes, start, vInt);
-		assert (length == vInt.length);
-		if (length != vInt.length)
-			throw new IllegalStateException(
-					"parse int: length mismatch, expected " + vInt.length
-							+ " but get " + length);
-		data.set(vInt.value);
-	}
+        LazyUtils.readVInt(bytes, start, vInt);
+        assert (length == vInt.length);
+        if (length != vInt.length)
+            throw new IllegalStateException("parse int: length mismatch, expected " + vInt.length + " but got "
+                    + length);
+        data.set(vInt.value);
+    }
 }
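[Editor's note] A small sketch of the reusable-VInt pattern above (hypothetical, not part of this patch). The holder is kept as a field so that init() performs no per-row allocation; readVInt fills in both the decoded value and the number of bytes consumed:

    static int decodeVInt(byte[] bytes, int start) {
        LazyUtils.VInt vInt = new LazyUtils.VInt(); // holder: value + length
        LazyUtils.readVInt(bytes, start, vInt);     // fills vInt.value and vInt.length
        return vInt.value;
    }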
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
index 38097e6..e6b56c3 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
@@ -24,48 +24,40 @@
 
 /**
  * LazyObject for storing a value of Long.
- * 
  * <p>
- * Part of the code is adapted from Apache Harmony Project.
- * 
- * As with the specification, this implementation relied on code laid out in <a
- * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
- * Delight, (Addison Wesley, 2002)</a> as well as <a
- * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
  * </p>
- * 
  */
-public class LazyLong extends
-		LazyPrimitive<LazyLongObjectInspector, LongWritable> {
+public class LazyLong extends LazyPrimitive<LazyLongObjectInspector, LongWritable> {
 
-	public LazyLong(LazyLongObjectInspector oi) {
-		super(oi);
-		data = new LongWritable();
-	}
+    public LazyLong(LazyLongObjectInspector oi) {
+        super(oi);
+        data = new LongWritable();
+    }
 
-	public LazyLong(LazyLong copy) {
-		super(copy);
-		data = new LongWritable(copy.data.get());
-	}
+    public LazyLong(LazyLong copy) {
+        super(copy);
+        data = new LongWritable(copy.data.get());
+    }
 
-	/**
-	 * The reusable vLong for decoding the long.
-	 */
-	VLong vLong = new LazyUtils.VLong();
+    /**
+     * The reusable vLong for decoding the long.
+     */
+    VLong vLong = new LazyUtils.VLong();
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
 
-		LazyUtils.readVLong(bytes, start, vLong);
-		assert (length == vLong.length);
-		if (length != vLong.length)
-			throw new IllegalStateException("parse long: length mismatch");
-		data.set(vLong.value);
-	}
+        LazyUtils.readVLong(bytes, start, vLong);
+        assert (length == vLong.length);
+        if (length != vLong.length)
+            throw new IllegalStateException("parse long: length mismatch");
+        data.set(vLong.value);
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
index 56bc41b..9c7af2e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
@@ -34,304 +34,294 @@
 /**
  * LazyMap is serialized as follows: start A b c b c b c end bytes[] ->
  * |--------|---|---|---|---| ... |---|---|
- * 
  * Section A is the null-bytes. Suppose the map has N key-value pairs, then
  * there are (N*2+7)/8 bytes used as null-bytes. Each bit corresponds to a key
  * or a value and it indicates whether that key or value is null (0) or not null
  * (1).
- * 
  * After A, all the bytes are actual serialized data of the map, which are
  * key-value pairs. b represent the keys and c represent the values. Each of
  * them is again a LazyObject.
- * 
  */
 
 @SuppressWarnings("rawtypes")
 public class LazyMap extends LazyNonPrimitive<LazyMapObjectInspector> {
 
-	private static Log LOG = LogFactory.getLog(LazyMap.class.getName());
+    private static Log LOG = LogFactory.getLog(LazyMap.class.getName());
 
-	/**
-	 * Whether the data is already parsed or not.
-	 */
-	boolean parsed;
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed;
 
-	/**
-	 * The size of the map. Only valid when the data is parsed. -1 when the map
-	 * is NULL.
-	 */
-	int mapSize = 0;
+    /**
+     * The size of the map. Only valid when the data is parsed. -1 when the map
+     * is NULL.
+     */
+    int mapSize = 0;
 
-	/**
-	 * The beginning position and length of key[i] and value[i]. Only valid when
-	 * the data is parsed.
-	 */
-	int[] keyStart;
-	int[] keyLength;
-	int[] valueStart;
-	int[] valueLength;
-	/**
-	 * Whether valueObjects[i]/keyObjects[i] is initialized or not.
-	 */
-	boolean[] keyInited;
-	boolean[] valueInited;
+    /**
+     * The beginning position and length of key[i] and value[i]. Only valid when
+     * the data is parsed.
+     */
+    int[] keyStart;
+    int[] keyLength;
+    int[] valueStart;
+    int[] valueLength;
+    /**
+     * Whether valueObjects[i]/keyObjects[i] is initialized or not.
+     */
+    boolean[] keyInited;
+    boolean[] valueInited;
 
-	/**
-	 * Whether valueObjects[i]/keyObjects[i] is null or not This could not be
-	 * inferred from the length of the object. In particular, a 0-length string
-	 * is not null.
-	 */
-	boolean[] keyIsNull;
-	boolean[] valueIsNull;
+    /**
+     * Whether valueObjects[i]/keyObjects[i] is null or not. This cannot be
+     * inferred from the length of the object. In particular, a 0-length string
+     * is not null.
+     */
+    boolean[] keyIsNull;
+    boolean[] valueIsNull;
 
-	/**
-	 * The keys are stored in an array of LazyPrimitives.
-	 */
-	LazyPrimitive<?, ?>[] keyObjects;
-	/**
-	 * The values are stored in an array of LazyObjects. value[index] will start
-	 * from KeyEnd[index] + 1, and ends before KeyStart[index+1] - 1.
-	 */
-	LazyObject[] valueObjects;
+    /**
+     * The keys are stored in an array of LazyPrimitives.
+     */
+    LazyPrimitive<?, ?>[] keyObjects;
+    /**
+     * The values are stored in an array of LazyObjects. value[index] will start
+     * from KeyEnd[index] + 1 and end before KeyStart[index+1] - 1.
+     */
+    LazyObject[] valueObjects;
 
-	protected LazyMap(LazyMapObjectInspector oi) {
-		super(oi);
-	}
+    protected LazyMap(LazyMapObjectInspector oi) {
+        super(oi);
+    }
 
-	/**
-	 * Set the row data for this LazyMap.
-	 * 
-	 * @see LazyObject#init(ByteArrayRef, int, int)
-	 */
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		super.init(bytes, start, length);
-		parsed = false;
-	}
+    /**
+     * Set the row data for this LazyMap.
+     * 
+     * @see LazyObject#init(ByteArrayRef, int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
 
-	/**
-	 * Adjust the size of arrays: keyStart, keyLength valueStart, valueLength
-	 * keyInited, keyIsNull valueInited, valueIsNull.
-	 */
-	protected void adjustArraySize(int newSize) {
-		if (keyStart == null || keyStart.length < newSize) {
-			keyStart = new int[newSize];
-			keyLength = new int[newSize];
-			valueStart = new int[newSize];
-			valueLength = new int[newSize];
-			keyInited = new boolean[newSize];
-			keyIsNull = new boolean[newSize];
-			valueInited = new boolean[newSize];
-			valueIsNull = new boolean[newSize];
-			keyObjects = new LazyPrimitive<?, ?>[newSize];
-			valueObjects = new LazyObject[newSize];
-		}
-	}
+    /**
+     * Adjust the size of arrays: keyStart, keyLength valueStart, valueLength
+     * keyInited, keyIsNull valueInited, valueIsNull.
+     */
+    protected void adjustArraySize(int newSize) {
+        if (keyStart == null || keyStart.length < newSize) {
+            keyStart = new int[newSize];
+            keyLength = new int[newSize];
+            valueStart = new int[newSize];
+            valueLength = new int[newSize];
+            keyInited = new boolean[newSize];
+            keyIsNull = new boolean[newSize];
+            valueInited = new boolean[newSize];
+            valueIsNull = new boolean[newSize];
+            keyObjects = new LazyPrimitive<?, ?>[newSize];
+            valueObjects = new LazyObject[newSize];
+        }
+    }
 
-	boolean nullMapKey = false;
-	VInt vInt = new LazyUtils.VInt();
-	RecordInfo recordInfo = new LazyUtils.RecordInfo();
+    boolean nullMapKey = false;
+    VInt vInt = new LazyUtils.VInt();
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
 
-	/**
-	 * Parse the byte[] and fill keyStart, keyLength, keyIsNull valueStart,
-	 * valueLength and valueIsNull.
-	 */
-	private void parse() {
+    /**
+     * Parse the byte[] and fill keyStart, keyLength, keyIsNull valueStart,
+     * valueLength and valueIsNull.
+     */
+    private void parse() {
 
-		// get the VInt that represents the map size
-		LazyUtils.readVInt(bytes, start, vInt);
-		mapSize = vInt.value;
-		if (0 == mapSize) {
-			parsed = true;
-			return;
-		}
+        // get the VInt that represents the map size
+        LazyUtils.readVInt(bytes, start, vInt);
+        mapSize = vInt.value;
+        if (0 == mapSize) {
+            parsed = true;
+            return;
+        }
 
-		// adjust arrays
-		adjustArraySize(mapSize);
+        // adjust arrays
+        adjustArraySize(mapSize);
 
-		// find out the null-bytes
-		int mapByteStart = start + vInt.length;
-		int nullByteCur = mapByteStart;
-		int nullByteEnd = mapByteStart + (mapSize * 2 + 7) / 8;
-		int lastElementByteEnd = nullByteEnd;
+        // find out the null-bytes
+        int mapByteStart = start + vInt.length;
+        int nullByteCur = mapByteStart;
+        int nullByteEnd = mapByteStart + (mapSize * 2 + 7) / 8;
+        int lastElementByteEnd = nullByteEnd;
 
-		// parsing the keys and values one by one
-		for (int i = 0; i < mapSize; i++) {
-			// parse a key
-			keyIsNull[i] = true;
-			if ((bytes[nullByteCur] & (1 << ((i * 2) % 8))) != 0) {
-				keyIsNull[i] = false;
-				LazyUtils.checkObjectByteInfo(
-						((MapObjectInspector) oi).getMapKeyObjectInspector(),
-						bytes, lastElementByteEnd, recordInfo);
-				keyStart[i] = lastElementByteEnd + recordInfo.elementOffset;
-				keyLength[i] = recordInfo.elementSize;
-				lastElementByteEnd = keyStart[i] + keyLength[i];
-			} else if (!nullMapKey) {
-				nullMapKey = true;
-				LOG.warn("Null map key encountered! Ignoring similar problems.");
-			}
+        // parsing the keys and values one by one
+        for (int i = 0; i < mapSize; i++) {
+            // parse a key
+            keyIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << ((i * 2) % 8))) != 0) {
+                keyIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(((MapObjectInspector) oi).getMapKeyObjectInspector(), bytes,
+                        lastElementByteEnd, recordInfo);
+                keyStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                keyLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = keyStart[i] + keyLength[i];
+            } else if (!nullMapKey) {
+                nullMapKey = true;
+                LOG.warn("Null map key encountered! Ignoring similar problems.");
+            }
 
-			// parse a value
-			valueIsNull[i] = true;
-			if ((bytes[nullByteCur] & (1 << ((i * 2 + 1) % 8))) != 0) {
-				valueIsNull[i] = false;
-				LazyUtils.checkObjectByteInfo(
-						((MapObjectInspector) oi).getMapValueObjectInspector(),
-						bytes, lastElementByteEnd, recordInfo);
-				valueStart[i] = lastElementByteEnd + recordInfo.elementOffset;
-				valueLength[i] = recordInfo.elementSize;
-				lastElementByteEnd = valueStart[i] + valueLength[i];
-			}
+            // parse a value
+            valueIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << ((i * 2 + 1) % 8))) != 0) {
+                valueIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(((MapObjectInspector) oi).getMapValueObjectInspector(), bytes,
+                        lastElementByteEnd, recordInfo);
+                valueStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                valueLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = valueStart[i] + valueLength[i];
+            }
 
-			// move onto the next null byte
-			if (3 == (i % 4)) {
-				nullByteCur++;
-			}
-		}
+            // move onto the next null byte
+            if (3 == (i % 4)) {
+                nullByteCur++;
+            }
+        }
 
-		Arrays.fill(keyInited, 0, mapSize, false);
-		Arrays.fill(valueInited, 0, mapSize, false);
-		parsed = true;
-	}
+        Arrays.fill(keyInited, 0, mapSize, false);
+        Arrays.fill(valueInited, 0, mapSize, false);
+        parsed = true;
+    }
 
-	/**
-	 * Get the value object with the index without checking parsed.
-	 * 
-	 * @param index
-	 *            The index into the array starting from 0
-	 */
-	private LazyObject uncheckedGetValue(int index) {
-		if (valueIsNull[index]) {
-			return null;
-		}
-		if (!valueInited[index]) {
-			valueInited[index] = true;
-			if (valueObjects[index] == null) {
-				valueObjects[index] = LazyFactory
-						.createLazyObject(((MapObjectInspector) oi)
-								.getMapValueObjectInspector());
-			}
-			valueObjects[index].init(bytes, valueStart[index],
-					valueLength[index]);
-		}
-		return valueObjects[index];
-	}
+    /**
+     * Get the value object with the index without checking parsed.
+     * 
+     * @param index
+     *            The index into the array starting from 0
+     */
+    private LazyObject uncheckedGetValue(int index) {
+        if (valueIsNull[index]) {
+            return null;
+        }
+        if (!valueInited[index]) {
+            valueInited[index] = true;
+            if (valueObjects[index] == null) {
+                valueObjects[index] = LazyFactory.createLazyObject(((MapObjectInspector) oi)
+                        .getMapValueObjectInspector());
+            }
+            valueObjects[index].init(bytes, valueStart[index], valueLength[index]);
+        }
+        return valueObjects[index];
+    }
 
-	/**
-	 * Get the value in the map for the key.
-	 * 
-	 * If there are multiple matches (which is possible in the serialized
-	 * format), only the first one is returned.
-	 * 
-	 * The most efficient way to get the value for the key is to serialize the
-	 * key and then try to find it in the array. We do linear search because in
-	 * most cases, user only wants to get one or two values out of the map, and
-	 * the cost of building up a HashMap is substantially higher.
-	 * 
-	 * @param key
-	 *            The key object that we are looking for.
-	 * @return The corresponding value object, or NULL if not found
-	 */
-	public Object getMapValueElement(Object key) {
-		if (!parsed) {
-			parse();
-		}
-		// search for the key
-		for (int i = 0; i < mapSize; i++) {
-			LazyPrimitive<?, ?> lazyKeyI = uncheckedGetKey(i);
-			if (lazyKeyI == null) {
-				continue;
-			}
-			// getWritableObject() will convert LazyPrimitive to actual
-			// primitive
-			// writable objects.
-			Object keyI = lazyKeyI.getWritableObject();
-			if (keyI == null) {
-				continue;
-			}
-			if (keyI.equals(key)) {
-				// Got a match, return the value
-				LazyObject v = uncheckedGetValue(i);
-				return v == null ? v : v.getObject();
-			}
-		}
-		return null;
-	}
+    /**
+     * Get the value in the map for the key.
+     * If there are multiple matches (which is possible in the serialized
+     * format), only the first one is returned.
+     * The most efficient way to get the value for the key is to serialize the
+     * key and then try to find it in the array. We do a linear search because in
+     * most cases, the user only wants to get one or two values out of the map, and
+     * the cost of building up a HashMap is substantially higher.
+     * 
+     * @param key
+     *            The key object that we are looking for.
+     * @return The corresponding value object, or NULL if not found
+     */
+    public Object getMapValueElement(Object key) {
+        if (!parsed) {
+            parse();
+        }
+        // search for the key
+        for (int i = 0; i < mapSize; i++) {
+            LazyPrimitive<?, ?> lazyKeyI = uncheckedGetKey(i);
+            if (lazyKeyI == null) {
+                continue;
+            }
+            // getWritableObject() will convert LazyPrimitive to actual
+            // primitive writable objects.
+            Object keyI = lazyKeyI.getWritableObject();
+            if (keyI == null) {
+                continue;
+            }
+            if (keyI.equals(key)) {
+                // Got a match, return the value
+                LazyObject v = uncheckedGetValue(i);
+                return v == null ? v : v.getObject();
+            }
+        }
+        return null;
+    }
 
-	/**
-	 * Get the key object with the index without checking parsed.
-	 * 
-	 * @param index
-	 *            The index into the array starting from 0
-	 */
-	private LazyPrimitive<?, ?> uncheckedGetKey(int index) {
-		if (keyIsNull[index]) {
-			return null;
-		}
-		if (!keyInited[index]) {
-			keyInited[index] = true;
-			if (keyObjects[index] == null) {
-				// Keys are always primitive
-				keyObjects[index] = LazyFactory
-						.createLazyPrimitiveClass((PrimitiveObjectInspector) ((MapObjectInspector) oi)
-								.getMapKeyObjectInspector());
-			}
-			keyObjects[index].init(bytes, keyStart[index], keyLength[index]);
-		}
-		return keyObjects[index];
-	}
+    /**
+     * Get the key object with the index without checking parsed.
+     * 
+     * @param index
+     *            The index into the array starting from 0
+     */
+    private LazyPrimitive<?, ?> uncheckedGetKey(int index) {
+        if (keyIsNull[index]) {
+            return null;
+        }
+        if (!keyInited[index]) {
+            keyInited[index] = true;
+            if (keyObjects[index] == null) {
+                // Keys are always primitive
+                keyObjects[index] = LazyFactory
+                        .createLazyPrimitiveClass((PrimitiveObjectInspector) ((MapObjectInspector) oi)
+                                .getMapKeyObjectInspector());
+            }
+            keyObjects[index].init(bytes, keyStart[index], keyLength[index]);
+        }
+        return keyObjects[index];
+    }
 
-	/**
-	 * cachedMap is reused for different calls to getMap(). But each LazyMap has
-	 * a separate cachedMap so we won't overwrite the data by accident.
-	 */
-	LinkedHashMap<Object, Object> cachedMap;
+    /**
+     * cachedMap is reused for different calls to getMap(). But each LazyMap has
+     * a separate cachedMap so we won't overwrite the data by accident.
+     */
+    LinkedHashMap<Object, Object> cachedMap;
 
-	/**
-	 * Return the map object representing this LazyMap. Note that the keyObjects
-	 * will be Writable primitive objects.
-	 * 
-	 * @return the map object
-	 */
-	public Map<Object, Object> getMap() {
-		if (!parsed) {
-			parse();
-		}
-		if (cachedMap == null) {
-			// Use LinkedHashMap to provide deterministic order
-			cachedMap = new LinkedHashMap<Object, Object>();
-		} else {
-			cachedMap.clear();
-		}
+    /**
+     * Return the map object representing this LazyMap. Note that the keyObjects
+     * will be Writable primitive objects.
+     * 
+     * @return the map object
+     */
+    public Map<Object, Object> getMap() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedMap == null) {
+            // Use LinkedHashMap to provide deterministic order
+            cachedMap = new LinkedHashMap<Object, Object>();
+        } else {
+            cachedMap.clear();
+        }
 
-		// go through each element of the map
-		for (int i = 0; i < mapSize; i++) {
-			LazyPrimitive<?, ?> lazyKey = uncheckedGetKey(i);
-			if (lazyKey == null) {
-				continue;
-			}
-			Object key = lazyKey.getObject();
-			// do not overwrite if there are duplicate keys
-			if (key != null && !cachedMap.containsKey(key)) {
-				LazyObject lazyValue = uncheckedGetValue(i);
-				Object value = (lazyValue == null ? null : lazyValue
-						.getObject());
-				cachedMap.put(key, value);
-			}
-		}
-		return cachedMap;
-	}
+        // go through each element of the map
+        for (int i = 0; i < mapSize; i++) {
+            LazyPrimitive<?, ?> lazyKey = uncheckedGetKey(i);
+            if (lazyKey == null) {
+                continue;
+            }
+            Object key = lazyKey.getObject();
+            // do not overwrite if there are duplicate keys
+            if (key != null && !cachedMap.containsKey(key)) {
+                LazyObject lazyValue = uncheckedGetValue(i);
+                Object value = (lazyValue == null ? null : lazyValue.getObject());
+                cachedMap.put(key, value);
+            }
+        }
+        return cachedMap;
+    }
 
-	/**
-	 * Get the size of the map represented by this LazyMap.
-	 * 
-	 * @return The size of the map
-	 */
-	public int getMapSize() {
-		if (!parsed) {
-			parse();
-		}
-		return mapSize;
-	}
+    /**
+     * Get the size of the map represented by this LazyMap.
+     * 
+     * @return The size of the map
+     */
+    public int getMapSize() {
+        if (!parsed) {
+            parse();
+        }
+        return mapSize;
+    }
 }
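
The null-byte bookkeeping in LazyMap.parse() is easiest to see with concrete numbers. Below is a small self-contained sketch (a hypothetical MapNullBytesDemo class, not part of this patch) that builds and re-reads the bitmap the same way parse() does: two bits per key-value pair, so (N * 2 + 7) / 8 null bytes, with the byte cursor advancing once every four pairs.

public class MapNullBytesDemo {
    public static void main(String[] args) {
        boolean[] keysNonNull = { true, true, false, true, true };
        boolean[] valuesNonNull = { true, false, true, true, false };
        int n = keysNonNull.length;

        // write the bitmap: bit (i*2) marks key i non-null, bit (i*2+1) value i
        byte[] nullBytes = new byte[(n * 2 + 7) / 8];
        for (int i = 0; i < n; i++) {
            if (keysNonNull[i]) {
                nullBytes[i / 4] |= 1 << ((i * 2) % 8);
            }
            if (valuesNonNull[i]) {
                nullBytes[i / 4] |= 1 << ((i * 2 + 1) % 8);
            }
        }

        // read it back the way parse() does
        int nullByteCur = 0;
        for (int i = 0; i < n; i++) {
            boolean keyIsNull = (nullBytes[nullByteCur] & (1 << ((i * 2) % 8))) == 0;
            boolean valueIsNull = (nullBytes[nullByteCur] & (1 << ((i * 2 + 1) % 8))) == 0;
            System.out.println("pair " + i + ": keyIsNull=" + keyIsNull + " valueIsNull=" + valueIsNull);
            // move onto the next null byte after 4 pairs (8 bits)
            if (3 == (i % 4)) {
                nullByteCur++;
            }
        }
    }
}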
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
index b151f2d..f7ae1e3 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
@@ -24,55 +24,54 @@
 /**
 * LazyNonPrimitive stores a non-primitive Object in a LazyObject.
  */
-public abstract class LazyNonPrimitive<OI extends ObjectInspector> extends
-		LazyObject<OI> {
+public abstract class LazyNonPrimitive<OI extends ObjectInspector> extends LazyObject<OI> {
 
-	protected byte[] bytes;
-	protected int start;
-	protected int length;
+    protected byte[] bytes;
+    protected int start;
+    protected int length;
 
-	/**
-	 * Create a LazyNonPrimitive object with the specified ObjectInspector.
-	 * 
-	 * @param oi
-	 *            The ObjectInspector would have to have a hierarchy of
-	 *            LazyObjectInspectors with the leaf nodes being
-	 *            WritableObjectInspectors. It's used both for accessing the
-	 *            type hierarchy of the complex object, as well as getting meta
-	 *            information (separator, nullSequence, etc) when parsing the
-	 *            lazy object.
-	 */
-	protected LazyNonPrimitive(OI oi) {
-		super(oi);
-		bytes = null;
-		start = 0;
-		length = 0;
-	}
+    /**
+     * Create a LazyNonPrimitive object with the specified ObjectInspector.
+     * 
+     * @param oi
+     *            The ObjectInspector must have a hierarchy of
+     *            LazyObjectInspectors with the leaf nodes being
+     *            WritableObjectInspectors. It's used both for accessing the
+     *            type hierarchy of the complex object and for getting meta
+     *            information (separator, nullSequence, etc.) when parsing the
+     *            lazy object.
+     */
+    protected LazyNonPrimitive(OI oi) {
+        super(oi);
+        bytes = null;
+        start = 0;
+        length = 0;
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (bytes == null) {
-			throw new RuntimeException("bytes cannot be null!");
-		}
-		this.bytes = bytes;
-		this.start = start;
-		this.length = length;
-		assert start >= 0;
-		assert start + length <= bytes.length;
-	}
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (bytes == null) {
+            throw new RuntimeException("bytes cannot be null!");
+        }
+        this.bytes = bytes;
+        this.start = start;
+        this.length = length;
+        assert start >= 0;
+        assert start + length <= bytes.length;
+    }
 
-	@Override
-	public Object getObject() {
-		return this;
-	}
+    @Override
+    public Object getObject() {
+        return this;
+    }
 
-	@Override
-	public int hashCode() {
-		return LazyUtils.hashBytes(bytes, start, length);
-	}
+    @Override
+    public int hashCode() {
+        return LazyUtils.hashBytes(bytes, start, length);
+    }
 
-	@Override
-	public void init(IFrameTupleReference tuple) {
-	}
+    @Override
+    public void init(IFrameTupleReference tuple) {
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
index 9aaaa88..dc1dc60 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
@@ -23,56 +23,55 @@
 
 /**
  * LazyObject stores an object in a range of bytes in a byte[].
- * 
  * A LazyObject can represent any primitive object or hierarchical object like
  * array, map or struct.
  */
 public abstract class LazyObject<OI extends ObjectInspector> {
 
-	OI oi;
+    OI oi;
 
-	/**
-	 * Create a LazyObject.
-	 * 
-	 * @param oi
-	 *            Derived classes can access meta information about this Lazy
-	 *            Object (e.g, separator, nullSequence, escaper) from it.
-	 */
-	protected LazyObject(OI oi) {
-		this.oi = oi;
-	}
+    /**
+     * Create a LazyObject.
+     * 
+     * @param oi
+     *            Derived classes can access meta information about this Lazy
+     *            Object (e.g, separator, nullSequence, escaper) from it.
+     */
+    protected LazyObject(OI oi) {
+        this.oi = oi;
+    }
 
-	/**
-	 * Set the data for this LazyObject. We take ByteArrayRef instead of byte[]
-	 * so that we will be able to drop the reference to byte[] by a single
-	 * assignment. The ByteArrayRef object can be reused across multiple rows.
-	 * 
-	 * @param bytes
-	 *            The wrapper of the byte[].
-	 * @param start
-	 *            The start position inside the bytes.
-	 * @param length
-	 *            The length of the data, starting from "start"
-	 * @see ByteArrayRef
-	 */
-	public abstract void init(byte[] bytes, int start, int length);
+    /**
+     * Set the data for this LazyObject. The byte[] is passed in directly
+     * (rather than through a ByteArrayRef wrapper) and can be reused across
+     * multiple rows, so this method may be called many times with the same
+     * backing array.
+     * 
+     * @param bytes
+     *            The byte[] containing the data.
+     * @param start
+     *            The start position inside the bytes.
+     * @param length
+     *            The length of the data, starting from "start".
+     */
+    public abstract void init(byte[] bytes, int start, int length);
 
-	public abstract void init(IFrameTupleReference tuple);
+    public abstract void init(IFrameTupleReference tuple);
 
-	/**
-	 * If the LazyObject is a primitive Object, then deserialize it and return
-	 * the actual primitive Object. Otherwise (array, map, struct), return this.
-	 */
-	public abstract Object getObject();
+    /**
+     * If the LazyObject is a primitive Object, then deserialize it and return
+     * the actual primitive Object. Otherwise (array, map, struct), return this.
+     */
+    public abstract Object getObject();
 
-	@Override
-	public abstract int hashCode();
+    @Override
+    public abstract int hashCode();
 
-	protected OI getInspector() {
-		return oi;
-	}
+    protected OI getInspector() {
+        return oi;
+    }
 
-	protected void setInspector(OI oi) {
-		this.oi = oi;
-	}
+    protected void setInspector(OI oi) {
+        this.oi = oi;
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
index 888e5b2..8139c65 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
@@ -25,46 +25,45 @@
 /**
  * LazyPrimitive stores a primitive Object in a LazyObject.
  */
-public abstract class LazyPrimitive<OI extends ObjectInspector, T extends Writable>
-		extends LazyObject<OI> {
+public abstract class LazyPrimitive<OI extends ObjectInspector, T extends Writable> extends LazyObject<OI> {
 
-	LazyPrimitive(OI oi) {
-		super(oi);
-	}
+    LazyPrimitive(OI oi) {
+        super(oi);
+    }
 
-	LazyPrimitive(LazyPrimitive<OI, T> copy) {
-		super(copy.oi);
-		isNull = copy.isNull;
-	}
+    LazyPrimitive(LazyPrimitive<OI, T> copy) {
+        super(copy.oi);
+        isNull = copy.isNull;
+    }
 
-	T data;
-	boolean isNull = false;
+    T data;
+    boolean isNull = false;
 
-	/**
-	 * Returns the primitive object represented by this LazyObject. This is
-	 * useful because it can make sure we have "null" for null objects.
-	 */
-	@Override
-	public Object getObject() {
-		return isNull ? null : this;
-	}
+    /**
+     * Returns the primitive object represented by this LazyObject. This is
+     * useful because it can make sure we have "null" for null objects.
+     */
+    @Override
+    public Object getObject() {
+        return isNull ? null : this;
+    }
 
-	public T getWritableObject() {
-		return isNull ? null : data;
-	}
+    public T getWritableObject() {
+        return isNull ? null : data;
+    }
 
-	@Override
-	public String toString() {
-		return isNull ? "null" : data.toString();
-	}
+    @Override
+    public String toString() {
+        return isNull ? "null" : data.toString();
+    }
 
-	@Override
-	public int hashCode() {
-		return isNull ? 0 : data.hashCode();
-	}
+    @Override
+    public int hashCode() {
+        return isNull ? 0 : data.hashCode();
+    }
 
-	@Override
-	public void init(IFrameTupleReference tuple) {
-	}
+    @Override
+    public void init(IFrameTupleReference tuple) {
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
index 4d0dff6..05b82ba 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
@@ -62,416 +62,399 @@
  */
 public class LazySerDe implements SerDe {
 
-	public static final Log LOG = LogFactory.getLog(LazySerDe.class.getName());
+    public static final Log LOG = LogFactory.getLog(LazySerDe.class.getName());
 
-	public LazySerDe() {
-	}
+    public LazySerDe() {
+    }
 
-	List<String> columnNames;
-	List<TypeInfo> columnTypes;
+    List<String> columnNames;
+    List<TypeInfo> columnTypes;
 
-	TypeInfo rowTypeInfo;
-	ObjectInspector cachedObjectInspector;
+    TypeInfo rowTypeInfo;
+    ObjectInspector cachedObjectInspector;
 
-	// The object for storing row data
-	LazyColumnar cachedLazyStruct;
+    // The object for storing row data
+    LazyColumnar cachedLazyStruct;
 
-	/**
-	 * Initialize the SerDe with configuration and table information.
-	 */
-	@Override
-	public void initialize(Configuration conf, Properties tbl)
-			throws SerDeException {
-		// Get column names and types
-		String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
-		String columnTypeProperty = tbl
-				.getProperty(Constants.LIST_COLUMN_TYPES);
-		if (columnNameProperty.length() == 0) {
-			columnNames = new ArrayList<String>();
-		} else {
-			columnNames = Arrays.asList(columnNameProperty.split(","));
-		}
-		if (columnTypeProperty.length() == 0) {
-			columnTypes = new ArrayList<TypeInfo>();
-		} else {
-			columnTypes = TypeInfoUtils
-					.getTypeInfosFromTypeString(columnTypeProperty);
-		}
-		assert (columnNames.size() == columnTypes.size());
-		// Create row related objects
-		rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames,
-				columnTypes);
-		// Create the object inspector and the lazy binary struct object
-		cachedObjectInspector = LazyUtils.getLazyObjectInspectorFromTypeInfo(
-				rowTypeInfo, true);
-		cachedLazyStruct = (LazyColumnar) LazyFactory
-				.createLazyObject(cachedObjectInspector);
-		// output debug info
-		LOG.debug("LazySerDe initialized with: columnNames=" + columnNames
-				+ " columnTypes=" + columnTypes);
-	}
+    /**
+     * Initialize the SerDe with configuration and table information.
+     */
+    @Override
+    public void initialize(Configuration conf, Properties tbl) throws SerDeException {
+        // Get column names and types
+        String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
+        String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+        if (columnNameProperty.length() == 0) {
+            columnNames = new ArrayList<String>();
+        } else {
+            columnNames = Arrays.asList(columnNameProperty.split(","));
+        }
+        if (columnTypeProperty.length() == 0) {
+            columnTypes = new ArrayList<TypeInfo>();
+        } else {
+            columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
+        }
+        assert (columnNames.size() == columnTypes.size());
+        // Create row related objects
+        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+        // Create the object inspector and the lazy binary struct object
+        cachedObjectInspector = LazyUtils.getLazyObjectInspectorFromTypeInfo(rowTypeInfo, true);
+        cachedLazyStruct = (LazyColumnar) LazyFactory.createLazyObject(cachedObjectInspector);
+        // output debug info
+        LOG.debug("LazySerDe initialized with: columnNames=" + columnNames + " columnTypes=" + columnTypes);
+    }
 
-	/**
-	 * Returns the ObjectInspector for the row.
-	 */
-	@Override
-	public ObjectInspector getObjectInspector() throws SerDeException {
-		return cachedObjectInspector;
-	}
+    /**
+     * Returns the ObjectInspector for the row.
+     */
+    @Override
+    public ObjectInspector getObjectInspector() throws SerDeException {
+        return cachedObjectInspector;
+    }
 
-	/**
-	 * Returns the Writable Class after serialization.
-	 */
-	@Override
-	public Class<? extends Writable> getSerializedClass() {
-		return BytesWritable.class;
-	}
+    /**
+     * Returns the Writable Class after serialization.
+     */
+    @Override
+    public Class<? extends Writable> getSerializedClass() {
+        return BytesWritable.class;
+    }
 
-	// The wrapper for byte array
-	ByteArrayRef byteArrayRef;
+    // The wrapper for byte array
+    ByteArrayRef byteArrayRef;
 
-	/**
-	 * Deserialize a table record to a Lazy struct.
-	 */
-	@SuppressWarnings("deprecation")
-	@Override
-	public Object deserialize(Writable field) throws SerDeException {
-		if (byteArrayRef == null) {
-			byteArrayRef = new ByteArrayRef();
-		}
-		if (field instanceof BytesWritable) {
-			BytesWritable b = (BytesWritable) field;
-			if (b.getSize() == 0) {
-				return null;
-			}
-			// For backward-compatibility with hadoop 0.17
-			byteArrayRef.setData(b.get());
-			cachedLazyStruct.init(byteArrayRef.getData(), 0, b.getSize());
-		} else if (field instanceof Text) {
-			Text t = (Text) field;
-			if (t.getLength() == 0) {
-				return null;
-			}
-			byteArrayRef.setData(t.getBytes());
-			cachedLazyStruct.init(byteArrayRef.getData(), 0, t.getLength());
-		} else {
-			throw new SerDeException(getClass().toString()
-					+ ": expects either BytesWritable or Text object!");
-		}
-		return cachedLazyStruct;
-	}
+    /**
+     * Deserialize a table record to a Lazy struct.
+     */
+    @SuppressWarnings("deprecation")
+    @Override
+    public Object deserialize(Writable field) throws SerDeException {
+        if (byteArrayRef == null) {
+            byteArrayRef = new ByteArrayRef();
+        }
+        if (field instanceof BytesWritable) {
+            BytesWritable b = (BytesWritable) field;
+            if (b.getSize() == 0) {
+                return null;
+            }
+            // For backward-compatibility with hadoop 0.17
+            byteArrayRef.setData(b.get());
+            cachedLazyStruct.init(byteArrayRef.getData(), 0, b.getSize());
+        } else if (field instanceof Text) {
+            Text t = (Text) field;
+            if (t.getLength() == 0) {
+                return null;
+            }
+            byteArrayRef.setData(t.getBytes());
+            cachedLazyStruct.init(byteArrayRef.getData(), 0, t.getLength());
+        } else {
+            throw new SerDeException(getClass().toString() + ": expects either BytesWritable or Text object!");
+        }
+        return cachedLazyStruct;
+    }
 
-	/**
-	 * The reusable output buffer and serialize byte buffer.
-	 */
-	BytesWritable serializeBytesWritable = new BytesWritable();
-	ByteStream.Output serializeByteStream = new ByteStream.Output();
+    /**
+     * The reusable output BytesWritable and serialization byte stream.
+     */
+    BytesWritable serializeBytesWritable = new BytesWritable();
+    ByteStream.Output serializeByteStream = new ByteStream.Output();
 
-	/**
-	 * Serialize an object to a byte buffer in a binary compact way.
-	 */
-	@Override
-	public Writable serialize(Object obj, ObjectInspector objInspector)
-			throws SerDeException {
-		// make sure it is a struct record or not
-		serializeByteStream.reset();
+    /**
+     * Serialize an object to a byte buffer in a binary compact way.
+     */
+    @Override
+    public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+        // make sure it is a struct record or not
+        serializeByteStream.reset();
 
-		if (objInspector.getCategory() != Category.STRUCT) {
-			// serialize the primitive object
-			serialize(serializeByteStream, obj, objInspector);
-		} else {
-			// serialize the row as a struct
-			serializeStruct(serializeByteStream, obj,
-					(StructObjectInspector) objInspector);
-		}
-		// return the serialized bytes
-		serializeBytesWritable.set(serializeByteStream.getData(), 0,
-				serializeByteStream.getCount());
-		return serializeBytesWritable;
-	}
+        if (objInspector.getCategory() != Category.STRUCT) {
+            // serialize the primitive object
+            serialize(serializeByteStream, obj, objInspector);
+        } else {
+            // serialize the row as a struct
+            serializeStruct(serializeByteStream, obj, (StructObjectInspector) objInspector);
+        }
+        // return the serialized bytes
+        serializeBytesWritable.set(serializeByteStream.getData(), 0, serializeByteStream.getCount());
+        return serializeBytesWritable;
+    }
 
-	boolean nullMapKey = false;
+    boolean nullMapKey = false;
 
-	/**
-	 * Serialize a struct object without writing the byte size. This function is
-	 * shared by both row serialization and struct serialization.
-	 * 
-	 * @param byteStream
-	 *            the byte stream storing the serialization data
-	 * @param obj
-	 *            the struct object to serialize
-	 * @param objInspector
-	 *            the struct object inspector
-	 */
-	private void serializeStruct(Output byteStream, Object obj,
-			StructObjectInspector soi) {
-		// do nothing for null struct
-		if (null == obj) {
-			return;
-		}
-		/*
-		 * Interleave serializing one null byte and 8 struct fields in each
-		 * round, in order to support data deserialization with different table
-		 * schemas
-		 */
-		List<? extends StructField> fields = soi.getAllStructFieldRefs();
-		int size = fields.size();
-		int lasti = 0;
-		byte nullByte = 0;
-		for (int i = 0; i < size; i++) {
-			// set bit to 1 if a field is not null
-			if (null != soi.getStructFieldData(obj, fields.get(i))) {
-				nullByte |= 1 << (i % 8);
-			}
-			// write the null byte every eight elements or
-			// if this is the last element and serialize the
-			// corresponding 8 struct fields at the same time
-			if (7 == i % 8 || i == size - 1) {
-				serializeByteStream.write(nullByte);
-				for (int j = lasti; j <= i; j++) {
-					serialize(serializeByteStream, soi.getStructFieldData(obj,
-							fields.get(j)), fields.get(j)
-							.getFieldObjectInspector());
-				}
-				lasti = i + 1;
-				nullByte = 0;
-			}
-		}
-	}
+    /**
+     * Serialize a struct object without writing the byte size. This function is
+     * shared by both row serialization and struct serialization.
+     * 
+     * @param byteStream
+     *            the byte stream storing the serialization data
+     * @param obj
+     *            the struct object to serialize
+     * @param soi
+     *            the struct object inspector
+     */
+    private void serializeStruct(Output byteStream, Object obj, StructObjectInspector soi) {
+        // do nothing for null struct
+        if (null == obj) {
+            return;
+        }
+        /*
+         * Interleave serializing one null byte and 8 struct fields in each
+         * round, in order to support data deserialization with different table
+         * schemas
+         */
+        List<? extends StructField> fields = soi.getAllStructFieldRefs();
+        int size = fields.size();
+        int lasti = 0;
+        byte nullByte = 0;
+        for (int i = 0; i < size; i++) {
+            // set bit to 1 if a field is not null
+            if (null != soi.getStructFieldData(obj, fields.get(i))) {
+                nullByte |= 1 << (i % 8);
+            }
+            // write the null byte every eight elements, or if this is the
+            // last element, and serialize the corresponding (up to 8)
+            // struct fields at the same time
+            if (7 == i % 8 || i == size - 1) {
+                serializeByteStream.write(nullByte);
+                for (int j = lasti; j <= i; j++) {
+                    serialize(serializeByteStream, soi.getStructFieldData(obj, fields.get(j)), fields.get(j)
+                            .getFieldObjectInspector());
+                }
+                lasti = i + 1;
+                nullByte = 0;
+            }
+        }
+    }
 
-	/**
-	 * A recursive function that serialize an object to a byte buffer based on
-	 * its object inspector.
-	 * 
-	 * @param byteStream
-	 *            the byte stream storing the serialization data
-	 * @param obj
-	 *            the object to serialize
-	 * @param objInspector
-	 *            the object inspector
-	 */
-	private void serialize(Output byteStream, Object obj,
-			ObjectInspector objInspector) {
+    /**
+     * A recursive function that serializes an object to a byte buffer based on
+     * its object inspector.
+     * 
+     * @param byteStream
+     *            the byte stream storing the serialization data
+     * @param obj
+     *            the object to serialize
+     * @param objInspector
+     *            the object inspector
+     */
+    private void serialize(Output byteStream, Object obj, ObjectInspector objInspector) {
 
-		// do nothing for null object
-		if (null == obj) {
-			return;
-		}
+        // do nothing for null object
+        if (null == obj) {
+            return;
+        }
 
-		switch (objInspector.getCategory()) {
-		case PRIMITIVE: {
-			PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
-			switch (poi.getPrimitiveCategory()) {
-			case VOID: {
-				return;
-			}
-			case BOOLEAN: {
-				boolean v = ((BooleanObjectInspector) poi).get(obj);
-				byteStream.write((byte) (v ? 1 : 0));
-				return;
-			}
-			case BYTE: {
-				ByteObjectInspector boi = (ByteObjectInspector) poi;
-				byte v = boi.get(obj);
-				byteStream.write(v);
-				return;
-			}
-			case SHORT: {
-				ShortObjectInspector spoi = (ShortObjectInspector) poi;
-				short v = spoi.get(obj);
-				byteStream.write((byte) (v >> 8));
-				byteStream.write((byte) (v));
-				return;
-			}
-			case INT: {
-				IntObjectInspector ioi = (IntObjectInspector) poi;
-				int v = ioi.get(obj);
-				LazyUtils.writeVInt(byteStream, v);
-				return;
-			}
-			case LONG: {
-				LongObjectInspector loi = (LongObjectInspector) poi;
-				long v = loi.get(obj);
-				LazyUtils.writeVLong(byteStream, v);
-				return;
-			}
-			case FLOAT: {
-				FloatObjectInspector foi = (FloatObjectInspector) poi;
-				int v = Float.floatToIntBits(foi.get(obj));
-				byteStream.write((byte) (v >> 24));
-				byteStream.write((byte) (v >> 16));
-				byteStream.write((byte) (v >> 8));
-				byteStream.write((byte) (v));
-				return;
-			}
-			case DOUBLE: {
-				DoubleObjectInspector doi = (DoubleObjectInspector) poi;
-				long v = Double.doubleToLongBits(doi.get(obj));
-				byteStream.write((byte) (v >> 56));
-				byteStream.write((byte) (v >> 48));
-				byteStream.write((byte) (v >> 40));
-				byteStream.write((byte) (v >> 32));
-				byteStream.write((byte) (v >> 24));
-				byteStream.write((byte) (v >> 16));
-				byteStream.write((byte) (v >> 8));
-				byteStream.write((byte) (v));
-				return;
-			}
-			case STRING: {
-				StringObjectInspector soi = (StringObjectInspector) poi;
-				Text t = soi.getPrimitiveWritableObject(obj);
-				/* write byte size of the string which is a vint */
-				int length = t.getLength();
-				LazyUtils.writeVInt(byteStream, length);
-				/* write string itself */
-				byte[] data = t.getBytes();
-				byteStream.write(data, 0, length);
-				return;
-			}
-			default: {
-				throw new RuntimeException("Unrecognized type: "
-						+ poi.getPrimitiveCategory());
-			}
-			}
-		}
-		case LIST: {
-			ListObjectInspector loi = (ListObjectInspector) objInspector;
-			ObjectInspector eoi = loi.getListElementObjectInspector();
+        switch (objInspector.getCategory()) {
+            case PRIMITIVE: {
+                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
+                switch (poi.getPrimitiveCategory()) {
+                    case VOID: {
+                        return;
+                    }
+                    case BOOLEAN: {
+                        boolean v = ((BooleanObjectInspector) poi).get(obj);
+                        byteStream.write((byte) (v ? 1 : 0));
+                        return;
+                    }
+                    case BYTE: {
+                        ByteObjectInspector boi = (ByteObjectInspector) poi;
+                        byte v = boi.get(obj);
+                        byteStream.write(v);
+                        return;
+                    }
+                    case SHORT: {
+                        ShortObjectInspector spoi = (ShortObjectInspector) poi;
+                        short v = spoi.get(obj);
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case INT: {
+                        IntObjectInspector ioi = (IntObjectInspector) poi;
+                        int v = ioi.get(obj);
+                        LazyUtils.writeVInt(byteStream, v);
+                        return;
+                    }
+                    case LONG: {
+                        LongObjectInspector loi = (LongObjectInspector) poi;
+                        long v = loi.get(obj);
+                        LazyUtils.writeVLong(byteStream, v);
+                        return;
+                    }
+                    case FLOAT: {
+                        FloatObjectInspector foi = (FloatObjectInspector) poi;
+                        int v = Float.floatToIntBits(foi.get(obj));
+                        byteStream.write((byte) (v >> 24));
+                        byteStream.write((byte) (v >> 16));
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case DOUBLE: {
+                        DoubleObjectInspector doi = (DoubleObjectInspector) poi;
+                        long v = Double.doubleToLongBits(doi.get(obj));
+                        byteStream.write((byte) (v >> 56));
+                        byteStream.write((byte) (v >> 48));
+                        byteStream.write((byte) (v >> 40));
+                        byteStream.write((byte) (v >> 32));
+                        byteStream.write((byte) (v >> 24));
+                        byteStream.write((byte) (v >> 16));
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case STRING: {
+                        StringObjectInspector soi = (StringObjectInspector) poi;
+                        Text t = soi.getPrimitiveWritableObject(obj);
+                        /* write the byte size of the string, which is a VInt */
+                        int length = t.getLength();
+                        LazyUtils.writeVInt(byteStream, length);
+                        /* write string itself */
+                        byte[] data = t.getBytes();
+                        byteStream.write(data, 0, length);
+                        return;
+                    }
+                    default: {
+                        throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
+                    }
+                }
+            }
+            case LIST: {
+                ListObjectInspector loi = (ListObjectInspector) objInspector;
+                ObjectInspector eoi = loi.getListElementObjectInspector();
 
-			// 1/ reserve spaces for the byte size of the list
-			// which is a integer and takes four bytes
-			int byteSizeStart = byteStream.getCount();
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			int listStart = byteStream.getCount();
+                // 1/ reserve space for the byte size of the list,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int listStart = byteStream.getCount();
 
-			// 2/ write the size of the list as a VInt
-			int size = loi.getListLength(obj);
-			LazyUtils.writeVInt(byteStream, size);
+                // 2/ write the size of the list as a VInt
+                int size = loi.getListLength(obj);
+                LazyUtils.writeVInt(byteStream, size);
 
-			// 3/ write the null bytes
-			byte nullByte = 0;
-			for (int eid = 0; eid < size; eid++) {
-				// set the bit to 1 if an element is not null
-				if (null != loi.getListElement(obj, eid)) {
-					nullByte |= 1 << (eid % 8);
-				}
-				// store the byte every eight elements or
-				// if this is the last element
-				if (7 == eid % 8 || eid == size - 1) {
-					byteStream.write(nullByte);
-					nullByte = 0;
-				}
-			}
+                // 3/ write the null bytes
+                byte nullByte = 0;
+                for (int eid = 0; eid < size; eid++) {
+                    // set the bit to 1 if an element is not null
+                    if (null != loi.getListElement(obj, eid)) {
+                        nullByte |= 1 << (eid % 8);
+                    }
+                    // store the byte every eight elements or
+                    // if this is the last element
+                    if (7 == eid % 8 || eid == size - 1) {
+                        byteStream.write(nullByte);
+                        nullByte = 0;
+                    }
+                }
 
-			// 4/ write element by element from the list
-			for (int eid = 0; eid < size; eid++) {
-				serialize(byteStream, loi.getListElement(obj, eid), eoi);
-			}
+                // 4/ write element by element from the list
+                for (int eid = 0; eid < size; eid++) {
+                    serialize(byteStream, loi.getListElement(obj, eid), eoi);
+                }
 
-			// 5/ update the list byte size
-			int listEnd = byteStream.getCount();
-			int listSize = listEnd - listStart;
-			byte[] bytes = byteStream.getData();
-			bytes[byteSizeStart] = (byte) (listSize >> 24);
-			bytes[byteSizeStart + 1] = (byte) (listSize >> 16);
-			bytes[byteSizeStart + 2] = (byte) (listSize >> 8);
-			bytes[byteSizeStart + 3] = (byte) (listSize);
+                // 5/ update the list byte size
+                int listEnd = byteStream.getCount();
+                int listSize = listEnd - listStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (listSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (listSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (listSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (listSize);
 
-			return;
-		}
-		case MAP: {
-			MapObjectInspector moi = (MapObjectInspector) objInspector;
-			ObjectInspector koi = moi.getMapKeyObjectInspector();
-			ObjectInspector voi = moi.getMapValueObjectInspector();
-			Map<?, ?> map = moi.getMap(obj);
+                return;
+            }
+            case MAP: {
+                MapObjectInspector moi = (MapObjectInspector) objInspector;
+                ObjectInspector koi = moi.getMapKeyObjectInspector();
+                ObjectInspector voi = moi.getMapValueObjectInspector();
+                Map<?, ?> map = moi.getMap(obj);
 
-			// 1/ reserve spaces for the byte size of the map
-			// which is a integer and takes four bytes
-			int byteSizeStart = byteStream.getCount();
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			int mapStart = byteStream.getCount();
+                // 1/ reserve space for the byte size of the map,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int mapStart = byteStream.getCount();
 
-			// 2/ write the size of the map which is a VInt
-			int size = map.size();
-			LazyUtils.writeVInt(byteStream, size);
+                // 2/ write the size of the map which is a VInt
+                int size = map.size();
+                LazyUtils.writeVInt(byteStream, size);
 
-			// 3/ write the null bytes
-			int b = 0;
-			byte nullByte = 0;
-			for (Map.Entry<?, ?> entry : map.entrySet()) {
-				// set the bit to 1 if a key is not null
-				if (null != entry.getKey()) {
-					nullByte |= 1 << (b % 8);
-				} else if (!nullMapKey) {
-					nullMapKey = true;
-					LOG.warn("Null map key encountered! Ignoring similar problems.");
-				}
-				b++;
-				// set the bit to 1 if a value is not null
-				if (null != entry.getValue()) {
-					nullByte |= 1 << (b % 8);
-				}
-				b++;
-				// write the byte to stream every 4 key-value pairs
-				// or if this is the last key-value pair
-				if (0 == b % 8 || b == size * 2) {
-					byteStream.write(nullByte);
-					nullByte = 0;
-				}
-			}
+                // 3/ write the null bytes
+                int b = 0;
+                byte nullByte = 0;
+                for (Map.Entry<?, ?> entry : map.entrySet()) {
+                    // set the bit to 1 if a key is not null
+                    if (null != entry.getKey()) {
+                        nullByte |= 1 << (b % 8);
+                    } else if (!nullMapKey) {
+                        nullMapKey = true;
+                        LOG.warn("Null map key encountered! Ignoring similar problems.");
+                    }
+                    b++;
+                    // set the bit to 1 if a value is not null
+                    if (null != entry.getValue()) {
+                        nullByte |= 1 << (b % 8);
+                    }
+                    b++;
+                    // write the byte to stream every 4 key-value pairs
+                    // or if this is the last key-value pair
+                    if (0 == b % 8 || b == size * 2) {
+                        byteStream.write(nullByte);
+                        nullByte = 0;
+                    }
+                }
 
-			// 4/ write key-value pairs one by one
-			for (Map.Entry<?, ?> entry : map.entrySet()) {
-				serialize(byteStream, entry.getKey(), koi);
-				serialize(byteStream, entry.getValue(), voi);
-			}
+                // 4/ write key-value pairs one by one
+                for (Map.Entry<?, ?> entry : map.entrySet()) {
+                    serialize(byteStream, entry.getKey(), koi);
+                    serialize(byteStream, entry.getValue(), voi);
+                }
 
-			// 5/ update the byte size of the map
-			int mapEnd = byteStream.getCount();
-			int mapSize = mapEnd - mapStart;
-			byte[] bytes = byteStream.getData();
-			bytes[byteSizeStart] = (byte) (mapSize >> 24);
-			bytes[byteSizeStart + 1] = (byte) (mapSize >> 16);
-			bytes[byteSizeStart + 2] = (byte) (mapSize >> 8);
-			bytes[byteSizeStart + 3] = (byte) (mapSize);
+                // 5/ update the byte size of the map
+                int mapEnd = byteStream.getCount();
+                int mapSize = mapEnd - mapStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (mapSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (mapSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (mapSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (mapSize);
 
-			return;
-		}
-		case STRUCT: {
-			// 1/ reserve spaces for the byte size of the struct
-			// which is a integer and takes four bytes
-			int byteSizeStart = byteStream.getCount();
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			byteStream.write((byte) 0);
-			int structStart = byteStream.getCount();
+                return;
+            }
+            case STRUCT: {
+                // 1/ reserve space for the byte size of the struct,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int structStart = byteStream.getCount();
 
-			// 2/ serialize the struct
-			serializeStruct(byteStream, obj,
-					(StructObjectInspector) objInspector);
+                // 2/ serialize the struct
+                serializeStruct(byteStream, obj, (StructObjectInspector) objInspector);
 
-			// 3/ update the byte size of the struct
-			int structEnd = byteStream.getCount();
-			int structSize = structEnd - structStart;
-			byte[] bytes = byteStream.getData();
-			bytes[byteSizeStart] = (byte) (structSize >> 24);
-			bytes[byteSizeStart + 1] = (byte) (structSize >> 16);
-			bytes[byteSizeStart + 2] = (byte) (structSize >> 8);
-			bytes[byteSizeStart + 3] = (byte) (structSize);
+                // 3/ update the byte size of the struct
+                int structEnd = byteStream.getCount();
+                int structSize = structEnd - structStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (structSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (structSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (structSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (structSize);
 
-			return;
-		}
-		default: {
-			throw new RuntimeException("Unrecognized type: "
-					+ objInspector.getCategory());
-		}
-		}
-	}
+                return;
+            }
+            default: {
+                throw new RuntimeException("Unrecognized type: " + objInspector.getCategory());
+            }
+        }
+    }
 }
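
For LIST, MAP and STRUCT the serializer cannot know the nested object's byte size until after it has been written, so it reserves four zero bytes and then backpatches them with the big-endian size. The following standalone sketch (a hypothetical SizeBackpatchDemo class, not part of this patch) isolates that pattern with a fixed buffer standing in for ByteStream.Output.

public class SizeBackpatchDemo {
    public static void main(String[] args) {
        byte[] buf = new byte[64];
        int count = 0;

        // 1/ reserve space for the byte size (a 4-byte int)
        int byteSizeStart = count;
        count += 4;
        int payloadStart = count;

        // 2/ write the (here: dummy) payload
        byte[] payload = { 10, 20, 30, 40, 50 };
        System.arraycopy(payload, 0, buf, count, payload.length);
        count += payload.length;

        // 3/ backpatch the payload size, most significant byte first
        int size = count - payloadStart;
        buf[byteSizeStart] = (byte) (size >> 24);
        buf[byteSizeStart + 1] = (byte) (size >> 16);
        buf[byteSizeStart + 2] = (byte) (size >> 8);
        buf[byteSizeStart + 3] = (byte) (size);

        System.out.println("payload size = " + size); // prints 5
    }
}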
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
index 7484b72..f493b37 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
@@ -23,40 +23,32 @@
 
 /**
  * LazyObject for storing a value of Short.
- * 
  * <p>
- * Part of the code is adapted from Apache Harmony Project.
- * 
- * As with the specification, this implementation relied on code laid out in <a
- * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
- * Delight, (Addison Wesley, 2002)</a> as well as <a
- * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
  * </p>
- * 
  */
-public class LazyShort extends
-		LazyPrimitive<LazyShortObjectInspector, ShortWritable> {
+public class LazyShort extends LazyPrimitive<LazyShortObjectInspector, ShortWritable> {
 
-	public LazyShort(LazyShortObjectInspector oi) {
-		super(oi);
-		data = new ShortWritable();
-	}
+    public LazyShort(LazyShortObjectInspector oi) {
+        super(oi);
+        data = new ShortWritable();
+    }
 
-	public LazyShort(LazyShort copy) {
-		super(copy);
-		data = new ShortWritable(copy.data.get());
-	}
+    public LazyShort(LazyShort copy) {
+        super(copy);
+        data = new ShortWritable(copy.data.get());
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else {
+            isNull = false;
+        }
 
-		assert (2 == length);
-		data.set(LazyUtils.byteArrayToShort(bytes, start));
-	}
+        assert (2 == length);
+        data.set(LazyUtils.byteArrayToShort(bytes, start));
+    }
 
 }
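LazyShort.init encodes the null convention used throughout these lazy primitives: a zero-length field means SQL null, and a non-null short is exactly two big-endian bytes. A tiny demonstration of that decode, mirroring LazyUtils.byteArrayToShort:

public class ShortFieldDemo {
    // same logic as LazyUtils.byteArrayToShort: big-endian, high byte first
    static short byteArrayToShort(byte[] b, int offset) {
        short value = 0;
        value += (b[offset] & 0x000000FF) << 8;
        value += (b[offset + 1] & 0x000000FF);
        return value;
    }

    public static void main(String[] args) {
        byte[] field = { 0x12, 0x34 };
        // a zero-length field would mean null; otherwise length must be exactly 2
        System.out.println(byteArrayToShort(field, 0)); // 4660 == 0x1234
    }
}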
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
index c13533b..0293af8 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
@@ -27,34 +27,33 @@
  */
 public class LazyString extends LazyPrimitive<LazyStringObjectInspector, Text> {
 
-	public LazyString(LazyStringObjectInspector oi) {
-		super(oi);
-		data = new Text();
-	}
+    public LazyString(LazyStringObjectInspector oi) {
+        super(oi);
+        data = new Text();
+    }
 
-	public LazyString(LazyString copy) {
-		super(copy);
-		data = new Text(copy.data);
-	}
+    public LazyString(LazyString copy) {
+        super(copy);
+        data = new Text(copy.data);
+    }
 
-	VInt vInt = new LazyUtils.VInt();
+    VInt vInt = new LazyUtils.VInt();
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		if (length == 0) {
-			isNull = true;
-			return;
-		} else
-			isNull = false;
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else {
+            isNull = false;
+        }
 
-		// get the byte length of the string
-		LazyUtils.readVInt(bytes, start, vInt);
-		if (vInt.value + vInt.length != length)
-			throw new IllegalStateException(
-					"parse string: length mismatch, expected "
-							+ (vInt.value + vInt.length) + " but get " + length);
-		assert (length - vInt.length > -1);
-		data.set(bytes, start + vInt.length, length - vInt.length);
-	}
+        // get the byte length of the string
+        LazyUtils.readVInt(bytes, start, vInt);
+        if (vInt.value + vInt.length != length) {
+            throw new IllegalStateException("parse string: length mismatch, expected " + (vInt.value + vInt.length)
+                    + " but got " + length);
+        }
+        assert (length - vInt.length > -1);
+        data.set(bytes, start + vInt.length, length - vInt.length);
+    }
 
 }
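The string layout that init checks is a VInt header followed by the payload, so the header's decoded value (the payload size) plus the header's own byte length must equal the field length; init then slices the header off. A sketch of that check and slice for the one-byte-header case (payload sizes up to 127 fit in a single VInt byte, so decodeVIntSize would return 1 here):

import java.nio.charset.StandardCharsets;

public class StringFieldDemo {
    public static void main(String[] args) {
        // field bytes: 1-byte VInt header (value 5) followed by "hello"
        byte[] field = { 5, 'h', 'e', 'l', 'l', 'o' };
        int vIntLength = 1;       // what decodeVIntSize(field[0]) returns here
        int vIntValue = field[0]; // the payload size
        int length = field.length;

        if (vIntValue + vIntLength != length) {
            throw new IllegalStateException("parse string: length mismatch, expected "
                    + (vIntValue + vIntLength) + " but got " + length);
        }
        // the Text.set(bytes, start + vInt.length, length - vInt.length) slice
        String payload = new String(field, vIntLength, length - vIntLength, StandardCharsets.UTF_8);
        System.out.println(payload); // hello
    }
}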
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
index 61cc335..47e95e4 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
@@ -33,212 +33,202 @@
 /**
  * LazyStruct is serialized as follows: start A B A B A B end bytes[] ->
  * |-----|---------|--- ... ---|-----|---------|
- * 
  * Section A is one null-byte, corresponding to eight struct fields in Section
  * B. Each bit indicates whether the corresponding field is null (0) or not null
  * (1). Each field is a LazyObject.
- * 
 * Following B, there is another section A and B. This pattern repeats until
 * all struct fields are serialized.
  */
 public class LazyStruct extends LazyNonPrimitive<LazyStructObjectInspector> {
 
-	private static Log LOG = LogFactory.getLog(LazyStruct.class.getName());
+    private static Log LOG = LogFactory.getLog(LazyStruct.class.getName());
 
-	/**
-	 * Whether the data is already parsed or not.
-	 */
-	boolean parsed;
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed;
 
-	/**
-	 * The fields of the struct.
-	 */
-	@SuppressWarnings("rawtypes")
-	LazyObject[] fields;
+    /**
+     * The fields of the struct.
+     */
+    @SuppressWarnings("rawtypes")
+    LazyObject[] fields;
 
-	/**
-	 * Whether a field is initialized or not.
-	 */
-	boolean[] fieldInited;
+    /**
+     * Whether a field is initialized or not.
+     */
+    boolean[] fieldInited;
 
-	/**
-	 * Whether a field is null or not. Because length is 0 does not means the
-	 * field is null. In particular, a 0-length string is not null.
-	 */
-	boolean[] fieldIsNull;
+    /**
+     * Whether a field is null or not. A length of 0 does not mean the
+     * field is null; in particular, a 0-length string is not null.
+     */
+    boolean[] fieldIsNull;
 
-	/**
-	 * The start positions and lengths of struct fields. Only valid when the
-	 * data is parsed.
-	 */
-	int[] fieldStart;
-	int[] fieldLength;
+    /**
+     * The start positions and lengths of struct fields. Only valid when the
+     * data is parsed.
+     */
+    int[] fieldStart;
+    int[] fieldLength;
 
-	/**
-	 * Construct a LazyStruct object with an ObjectInspector.
-	 */
-	protected LazyStruct(LazyStructObjectInspector oi) {
-		super(oi);
-	}
+    /**
+     * Construct a LazyStruct object with an ObjectInspector.
+     */
+    protected LazyStruct(LazyStructObjectInspector oi) {
+        super(oi);
+    }
 
-	@Override
-	public void init(byte[] bytes, int start, int length) {
-		super.init(bytes, start, length);
-		parsed = false;
-	}
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
 
-	RecordInfo recordInfo = new LazyUtils.RecordInfo();
-	boolean missingFieldWarned = false;
-	boolean extraFieldWarned = false;
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
+    boolean missingFieldWarned = false;
+    boolean extraFieldWarned = false;
 
-	/**
-	 * Parse the byte[] and fill fieldStart, fieldLength, fieldInited and
-	 * fieldIsNull.
-	 */
-	private void parse() {
+    /**
+     * Parse the byte[] and fill fieldStart, fieldLength, fieldInited and
+     * fieldIsNull.
+     */
+    private void parse() {
 
-		List<? extends StructField> fieldRefs = ((StructObjectInspector) oi)
-				.getAllStructFieldRefs();
+        List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
 
-		if (fields == null) {
-			fields = new LazyObject[fieldRefs.size()];
-			for (int i = 0; i < fields.length; i++) {
-				ObjectInspector insp = fieldRefs.get(i)
-						.getFieldObjectInspector();
-				fields[i] = insp == null ? null : LazyFactory
-						.createLazyObject(insp);
-			}
-			fieldInited = new boolean[fields.length];
-			fieldIsNull = new boolean[fields.length];
-			fieldStart = new int[fields.length];
-			fieldLength = new int[fields.length];
-		}
+        if (fields == null) {
+            fields = new LazyObject[fieldRefs.size()];
+            for (int i = 0; i < fields.length; i++) {
+                ObjectInspector insp = fieldRefs.get(i).getFieldObjectInspector();
+                fields[i] = insp == null ? null : LazyFactory.createLazyObject(insp);
+            }
+            fieldInited = new boolean[fields.length];
+            fieldIsNull = new boolean[fields.length];
+            fieldStart = new int[fields.length];
+            fieldLength = new int[fields.length];
+        }
 
-		/**
-		 * Please note that one null byte is followed by eight fields, then more
-		 * null byte and fields.
-		 */
+        /*
+         * Note that one null byte is followed by eight fields, then another
+         * null byte and the next eight fields, and so on.
+         */
 
-		int fieldId = 0;
-		int structByteEnd = start + length;
+        int fieldId = 0;
+        int structByteEnd = start + length;
 
-		byte nullByte = bytes[start];
-		int lastFieldByteEnd = start + 1;
-		// Go through all bytes in the byte[]
-		for (int i = 0; i < fields.length; i++) {
-			fieldIsNull[i] = true;
-			if ((nullByte & (1 << (i % 8))) != 0) {
-				fieldIsNull[i] = false;
-				LazyUtils.checkObjectByteInfo(fieldRefs.get(i)
-						.getFieldObjectInspector(), bytes, lastFieldByteEnd,
-						recordInfo);
-				fieldStart[i] = lastFieldByteEnd + recordInfo.elementOffset;
-				fieldLength[i] = recordInfo.elementSize;
-				lastFieldByteEnd = fieldStart[i] + fieldLength[i];
-			}
+        byte nullByte = bytes[start];
+        int lastFieldByteEnd = start + 1;
+        // Go through all bytes in the byte[]
+        for (int i = 0; i < fields.length; i++) {
+            fieldIsNull[i] = true;
+            if ((nullByte & (1 << (i % 8))) != 0) {
+                fieldIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(fieldRefs.get(i).getFieldObjectInspector(), bytes, lastFieldByteEnd,
+                        recordInfo);
+                fieldStart[i] = lastFieldByteEnd + recordInfo.elementOffset;
+                fieldLength[i] = recordInfo.elementSize;
+                lastFieldByteEnd = fieldStart[i] + fieldLength[i];
+            }
 
-			// count how many fields are there
-			if (lastFieldByteEnd <= structByteEnd) {
-				fieldId++;
-			}
-			// next byte is a null byte if there are more bytes to go
-			if (7 == (i % 8)) {
-				if (lastFieldByteEnd < structByteEnd) {
-					nullByte = bytes[lastFieldByteEnd];
-					lastFieldByteEnd++;
-				} else {
-					// otherwise all null afterwards
-					nullByte = 0;
-					lastFieldByteEnd++;
-				}
-			}
-		}
+            // count how many fields there are
+            if (lastFieldByteEnd <= structByteEnd) {
+                fieldId++;
+            }
+            // next byte is a null byte if there are more bytes to go
+            if (7 == (i % 8)) {
+                if (lastFieldByteEnd < structByteEnd) {
+                    nullByte = bytes[lastFieldByteEnd];
+                    lastFieldByteEnd++;
+                } else {
+                    // otherwise all null afterwards
+                    nullByte = 0;
+                    lastFieldByteEnd++;
+                }
+            }
+        }
 
-		// Extra bytes at the end?
-		if (!extraFieldWarned && lastFieldByteEnd < structByteEnd) {
-			extraFieldWarned = true;
-			LOG.warn("Extra bytes detected at the end of the row! Ignoring similar "
-					+ "problems.");
-		}
+        // Extra bytes at the end?
+        if (!extraFieldWarned && lastFieldByteEnd < structByteEnd) {
+            extraFieldWarned = true;
+            LOG.warn("Extra bytes detected at the end of the row! Ignoring similar " + "problems.");
+        }
 
-		// Missing fields?
-		if (!missingFieldWarned && lastFieldByteEnd > structByteEnd) {
-			missingFieldWarned = true;
-			LOG.warn("Missing fields! Expected " + fields.length
-					+ " fields but " + "only got " + fieldId
-					+ "! Ignoring similar problems.");
-		}
+        // Missing fields?
+        if (!missingFieldWarned && lastFieldByteEnd > structByteEnd) {
+            missingFieldWarned = true;
+            LOG.warn("Missing fields! Expected " + fields.length + " fields but " + "only got " + fieldId
+                    + "! Ignoring similar problems.");
+        }
 
-		Arrays.fill(fieldInited, false);
-		parsed = true;
-	}
+        Arrays.fill(fieldInited, false);
+        parsed = true;
+    }
 
-	/**
-	 * Get one field out of the struct.
-	 * 
-	 * If the field is a primitive field, return the actual object. Otherwise
-	 * return the LazyObject. This is because PrimitiveObjectInspector does not
-	 * have control over the object used by the user - the user simply directly
-	 * use the Object instead of going through Object
-	 * PrimitiveObjectInspector.get(Object).
-	 * 
-	 * @param fieldID
-	 *            The field ID
-	 * @return The field as a LazyObject
-	 */
-	public Object getField(int fieldID) {
-		if (!parsed) {
-			parse();
-		}
-		return uncheckedGetField(fieldID);
-	}
+    /**
+     * Get one field out of the struct.
+     * If the field is a primitive field, return the actual object. Otherwise
+     * return the LazyObject. This is because PrimitiveObjectInspector does not
+     * have control over the object used by the user - the user simply uses the
+     * Object directly instead of going through
+     * PrimitiveObjectInspector.get(Object).
+     * 
+     * @param fieldID
+     *            The field ID
+     * @return The field as a LazyObject
+     */
+    public Object getField(int fieldID) {
+        if (!parsed) {
+            parse();
+        }
+        return uncheckedGetField(fieldID);
+    }
 
-	/**
-	 * Get the field out of the row without checking parsed. This is called by
-	 * both getField and getFieldsAsList.
-	 * 
-	 * @param fieldID
-	 *            The id of the field starting from 0.
-	 * @return The value of the field
-	 */
-	private Object uncheckedGetField(int fieldID) {
-		// Test the length first so in most cases we avoid doing a byte[]
-		// comparison.
-		if (fieldIsNull[fieldID]) {
-			return null;
-		}
-		if (!fieldInited[fieldID]) {
-			fieldInited[fieldID] = true;
-			fields[fieldID].init(bytes, fieldStart[fieldID],
-					fieldLength[fieldID]);
-		}
-		return fields[fieldID].getObject();
-	}
+    /**
+     * Get the field out of the row without checking parsed. This is called by
+     * both getField and getFieldsAsList.
+     * 
+     * @param fieldID
+     *            The id of the field starting from 0.
+     * @return The value of the field
+     */
+    private Object uncheckedGetField(int fieldID) {
+        // Test the length first so in most cases we avoid doing a byte[]
+        // comparison.
+        if (fieldIsNull[fieldID]) {
+            return null;
+        }
+        if (!fieldInited[fieldID]) {
+            fieldInited[fieldID] = true;
+            fields[fieldID].init(bytes, fieldStart[fieldID], fieldLength[fieldID]);
+        }
+        return fields[fieldID].getObject();
+    }
 
-	ArrayList<Object> cachedList;
+    ArrayList<Object> cachedList;
 
-	/**
-	 * Get the values of the fields as an ArrayList.
-	 * 
-	 * @return The values of the fields as an ArrayList.
-	 */
-	public ArrayList<Object> getFieldsAsList() {
-		if (!parsed) {
-			parse();
-		}
-		if (cachedList == null) {
-			cachedList = new ArrayList<Object>();
-		} else {
-			cachedList.clear();
-		}
-		for (int i = 0; i < fields.length; i++) {
-			cachedList.add(uncheckedGetField(i));
-		}
-		return cachedList;
-	}
+    /**
+     * Get the values of the fields as an ArrayList.
+     * 
+     * @return The values of the fields as an ArrayList.
+     */
+    public ArrayList<Object> getFieldsAsList() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>();
+        } else {
+            cachedList.clear();
+        }
+        for (int i = 0; i < fields.length; i++) {
+            cachedList.add(uncheckedGetField(i));
+        }
+        return cachedList;
+    }
 
-	@Override
-	public Object getObject() {
-		return this;
-	}
+    @Override
+    public Object getObject() {
+        return this;
+    }
 }
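The heart of parse() is the null-byte bitmap walk: one null byte covers the next eight fields, bit (i % 8) says whether field i is present, and after every eighth field the next stream byte is read as the following null byte. A compact sketch of just that walk, assuming one-byte field payloads for illustration (the real code gets each field's size from checkObjectByteInfo):

public class NullByteWalkDemo {
    public static void main(String[] args) {
        // three fields; null byte 0b00000101 -> fields 0 and 2 present
        byte[] bytes = { 0x05, 0x0A, 0x0C };
        int numFields = 3;
        byte nullByte = bytes[0];
        int pos = 1;
        for (int i = 0; i < numFields; i++) {
            if ((nullByte & (1 << (i % 8))) != 0) {
                System.out.println("field " + i + " = " + bytes[pos]);
                pos++; // one-byte payloads in this sketch
            } else {
                System.out.println("field " + i + " is null");
            }
            // after eight fields, the next byte is the next null byte
            if (7 == (i % 8) && pos < bytes.length) {
                nullByte = bytes[pos++];
            }
        }
    }
}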
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
index 2d0406c..6554ccc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
@@ -42,488 +42,462 @@
 
 /**
  * LazyUtils.
- * 
  */
 public final class LazyUtils {
 
-	/**
-	 * Convert the byte array to an int starting from the given offset. Refer to
-	 * code by aeden on DZone Snippets:
-	 * 
-	 * @param b
-	 *            the byte array
-	 * @param offset
-	 *            the array offset
-	 * @return the integer
-	 */
-	public static int byteArrayToInt(byte[] b, int offset) {
-		int value = 0;
-		for (int i = 0; i < 4; i++) {
-			int shift = (4 - 1 - i) * 8;
-			value += (b[i + offset] & 0x000000FF) << shift;
-		}
-		return value;
-	}
+    /**
+     * Convert the byte array to an int starting from the given offset. Refer to
+     * code by aeden on DZone Snippets:
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the integer
+     */
+    public static int byteArrayToInt(byte[] b, int offset) {
+        int value = 0;
+        for (int i = 0; i < 4; i++) {
+            int shift = (4 - 1 - i) * 8;
+            value += (b[i + offset] & 0x000000FF) << shift;
+        }
+        return value;
+    }
 
-	/**
-	 * Convert the byte array to a long starting from the given offset.
-	 * 
-	 * @param b
-	 *            the byte array
-	 * @param offset
-	 *            the array offset
-	 * @return the long
-	 */
-	public static long byteArrayToLong(byte[] b, int offset) {
-		long value = 0;
-		for (int i = 0; i < 8; i++) {
-			int shift = (8 - 1 - i) * 8;
-			value += ((long) (b[i + offset] & 0x00000000000000FF)) << shift;
-		}
-		return value;
-	}
+    /**
+     * Convert the byte array to a long starting from the given offset.
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the long
+     */
+    public static long byteArrayToLong(byte[] b, int offset) {
+        long value = 0;
+        for (int i = 0; i < 8; i++) {
+            int shift = (8 - 1 - i) * 8;
+            value += ((long) (b[i + offset] & 0x00000000000000FF)) << shift;
+        }
+        return value;
+    }
 
-	/**
-	 * Convert the byte array to a short starting from the given offset.
-	 * 
-	 * @param b
-	 *            the byte array
-	 * @param offset
-	 *            the array offset
-	 * @return the short
-	 */
-	public static short byteArrayToShort(byte[] b, int offset) {
-		short value = 0;
-		value += (b[offset] & 0x000000FF) << 8;
-		value += (b[offset + 1] & 0x000000FF);
-		return value;
-	}
+    /**
+     * Convert the byte array to a short starting from the given offset.
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the short
+     */
+    public static short byteArrayToShort(byte[] b, int offset) {
+        short value = 0;
+        value += (b[offset] & 0x000000FF) << 8;
+        value += (b[offset + 1] & 0x000000FF);
+        return value;
+    }
 
-	/**
-	 * Record is the unit that data is serialized in. A record includes two
-	 * parts. The first part stores the size of the element and the second part
-	 * stores the real element. size element record ->
-	 * |----|-------------------------|
-	 * 
-	 * A RecordInfo stores two information of a record, the size of the "size"
-	 * part which is the element offset and the size of the element part which
-	 * is element size.
-	 */
-	public static class RecordInfo {
-		public RecordInfo() {
-			elementOffset = 0;
-			elementSize = 0;
-		}
+    /**
+     * Record is the unit that data is serialized in. A record includes two
+     * parts: the first part stores the size of the element, and the second
+     * part stores the real element.
+     * 
+     *           size element
+     * record -> |----|-------------------------|
+     * 
+     * A RecordInfo stores two pieces of information about a record: the size
+     * of the "size" part, which is the element offset, and the size of the
+     * element part, which is the element size.
+     */
+    public static class RecordInfo {
+        public RecordInfo() {
+            elementOffset = 0;
+            elementSize = 0;
+        }
 
-		public byte elementOffset;
-		public int elementSize;
+        public byte elementOffset;
+        public int elementSize;
 
-		@Override
-		public String toString() {
-			return "(" + elementOffset + ", " + elementSize + ")";
-		}
-	}
+        @Override
+        public String toString() {
+            return "(" + elementOffset + ", " + elementSize + ")";
+        }
+    }
 
-	static VInt vInt = new LazyUtils.VInt();
+    static VInt vInt = new LazyUtils.VInt();
 
-	/**
-	 * Check a particular field and set its size and offset in bytes based on
-	 * the field type and the bytes arrays.
-	 * 
-	 * For void, boolean, byte, short, int, long, float and double, there is no
-	 * offset and the size is fixed. For string, map, list, struct, the first
-	 * four bytes are used to store the size. So the offset is 4 and the size is
-	 * computed by concating the first four bytes together. The first four bytes
-	 * are defined with respect to the offset in the bytes arrays.
-	 * 
-	 * @param objectInspector
-	 *            object inspector of the field
-	 * @param bytes
-	 *            bytes arrays store the table row
-	 * @param offset
-	 *            offset of this field
-	 * @param recordInfo
-	 *            modify this byteinfo object and return it
-	 */
-	public static void checkObjectByteInfo(ObjectInspector objectInspector,
-			byte[] bytes, int offset, RecordInfo recordInfo) {
-		Category category = objectInspector.getCategory();
-		switch (category) {
-		case PRIMITIVE:
-			PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) objectInspector)
-					.getPrimitiveCategory();
-			switch (primitiveCategory) {
-			case VOID:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = 0;
-				break;
-			case BOOLEAN:
-			case BYTE:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = 1;
-				break;
-			case SHORT:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = 2;
-				break;
-			case FLOAT:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = 4;
-				break;
-			case DOUBLE:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = 8;
-				break;
-			case INT:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = WritableUtils
-						.decodeVIntSize(bytes[offset]);
-				break;
-			case LONG:
-				recordInfo.elementOffset = 0;
-				recordInfo.elementSize = WritableUtils
-						.decodeVIntSize(bytes[offset]);
-				break;
-			case STRING:
-				// using vint instead of 4 bytes
-				LazyUtils.readVInt(bytes, offset, vInt);
-				recordInfo.elementOffset = vInt.length;
-				recordInfo.elementSize = vInt.value;
-				break;
-			default: {
-				throw new RuntimeException("Unrecognized primitive type: "
-						+ primitiveCategory);
-			}
-			}
-			break;
-		case LIST:
-		case MAP:
-		case STRUCT:
-			recordInfo.elementOffset = 4;
-			recordInfo.elementSize = LazyUtils.byteArrayToInt(bytes, offset);
-			break;
-		default: {
-			throw new RuntimeException("Unrecognized non-primitive type: "
-					+ category);
-		}
-		}
-	}
+    /**
+     * Check a particular field and set its size and offset in bytes, based on
+     * the field type and the byte array.
+     * For void, boolean, byte, short, float and double there is no offset and
+     * the size is fixed; for int and long the size is taken from the VInt
+     * header. For map, list and struct, the first four bytes store the size,
+     * so the offset is 4 and the size is computed by concatenating those four
+     * bytes. For string, a VInt header is used instead of four bytes. The
+     * size bytes are defined with respect to the offset into the byte array.
+     * 
+     * @param objectInspector
+     *            object inspector of the field
+     * @param bytes
+     *            byte array storing the table row
+     * @param offset
+     *            offset of this field
+     * @param recordInfo
+     *            the RecordInfo object to fill in and return
+     */
+    public static void checkObjectByteInfo(ObjectInspector objectInspector, byte[] bytes, int offset,
+            RecordInfo recordInfo) {
+        Category category = objectInspector.getCategory();
+        switch (category) {
+            case PRIMITIVE:
+                PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) objectInspector)
+                        .getPrimitiveCategory();
+                switch (primitiveCategory) {
+                    case VOID:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 0;
+                        break;
+                    case BOOLEAN:
+                    case BYTE:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 1;
+                        break;
+                    case SHORT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 2;
+                        break;
+                    case FLOAT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 4;
+                        break;
+                    case DOUBLE:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 8;
+                        break;
+                    case INT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
+                        break;
+                    case LONG:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
+                        break;
+                    case STRING:
+                        // using vint instead of 4 bytes
+                        LazyUtils.readVInt(bytes, offset, vInt);
+                        recordInfo.elementOffset = vInt.length;
+                        recordInfo.elementSize = vInt.value;
+                        break;
+                    default: {
+                        throw new RuntimeException("Unrecognized primitive type: " + primitiveCategory);
+                    }
+                }
+                break;
+            case LIST:
+            case MAP:
+            case STRUCT:
+                recordInfo.elementOffset = 4;
+                recordInfo.elementSize = LazyUtils.byteArrayToInt(bytes, offset);
+                break;
+            default: {
+                throw new RuntimeException("Unrecognized non-primitive type: " + category);
+            }
+        }
+    }
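Two concrete outcomes of checkObjectByteInfo, under the rules above: a STRING field's VInt header doubles as the size part, so the offset is the header length and the size its decoded value, while LIST/MAP/STRUCT always spend four fixed bytes on the size. A worked example of the four-byte case (byte values are illustrative):

public class RecordInfoDemo {
    public static void main(String[] args) {
        // a STRUCT field at offset 0: 4-byte big-endian size, then 3 body bytes
        byte[] bytes = { 0x00, 0x00, 0x00, 0x03, 0x0A, 0x0B, 0x0C };
        int elementOffset = 4; // fixed for LIST/MAP/STRUCT
        int elementSize = 0;   // same math as byteArrayToInt(bytes, 0)
        for (int i = 0; i < 4; i++) {
            elementSize += (bytes[i] & 0xFF) << ((4 - 1 - i) * 8);
        }
        System.out.println("(" + elementOffset + ", " + elementSize + ")"); // (4, 3)

        // a STRING field with a 1-byte VInt header of value 5 would instead
        // yield (1, 5): offset = header length, size = decoded header value
    }
}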
 
-	/**
-	 * A zero-compressed encoded long.
-	 */
-	public static class VLong {
-		public VLong() {
-			value = 0;
-			length = 0;
-		}
+    /**
+     * A zero-compressed encoded long.
+     */
+    public static class VLong {
+        public VLong() {
+            value = 0;
+            length = 0;
+        }
 
-		public long value;
-		public byte length;
-	};
+        public long value;
+        public byte length;
+    }
 
-	/**
-	 * Reads a zero-compressed encoded long from a byte array and returns it.
-	 * 
-	 * @param bytes
-	 *            the byte array
-	 * @param offset
-	 *            offset of the array to read from
-	 * @param vlong
-	 *            storing the deserialized long and its size in byte
-	 */
-	public static void readVLong(byte[] bytes, int offset, VLong vlong) {
-		byte firstByte = bytes[offset];
-		vlong.length = (byte) WritableUtils.decodeVIntSize(firstByte);
-		if (vlong.length == 1) {
-			vlong.value = firstByte;
-			return;
-		}
-		long i = 0;
-		for (int idx = 0; idx < vlong.length - 1; idx++) {
-			byte b = bytes[offset + 1 + idx];
-			i = i << 8;
-			i = i | (b & 0xFF);
-		}
-		vlong.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
-	}
+    /**
+     * Reads a zero-compressed encoded long from a byte array and returns it.
+     * 
+     * @param bytes
+     *            the byte array
+     * @param offset
+     *            offset of the array to read from
+     * @param vlong
+     *            storing the deserialized long and its size in byte
+     */
+    public static void readVLong(byte[] bytes, int offset, VLong vlong) {
+        byte firstByte = bytes[offset];
+        vlong.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+        if (vlong.length == 1) {
+            vlong.value = firstByte;
+            return;
+        }
+        long i = 0;
+        for (int idx = 0; idx < vlong.length - 1; idx++) {
+            byte b = bytes[offset + 1 + idx];
+            i = i << 8;
+            i = i | (b & 0xFF);
+        }
+        vlong.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
+    }
 
-	/**
-	 * A zero-compressed encoded integer.
-	 */
-	public static class VInt implements Serializable {
-		private static final long serialVersionUID = 1L;
+    /**
+     * A zero-compressed encoded integer.
+     */
+    public static class VInt implements Serializable {
+        private static final long serialVersionUID = 1L;
 
-		public VInt() {
-			value = 0;
-			length = 0;
-		}
+        public VInt() {
+            value = 0;
+            length = 0;
+        }
 
-		public int value;
-		public byte length;
-	};
+        public int value;
+        public byte length;
+    }
 
-	/**
-	 * Reads a zero-compressed encoded int from a byte array and returns it.
-	 * 
-	 * @param bytes
-	 *            the byte array
-	 * @param offset
-	 *            offset of the array to read from
-	 * @param vInt
-	 *            storing the deserialized int and its size in byte
-	 */
-	public static void readVInt(byte[] bytes, int offset, VInt vInt) {
-		byte firstByte = bytes[offset];
-		vInt.length = (byte) WritableUtils.decodeVIntSize(firstByte);
-		if (vInt.length == 1) {
-			vInt.value = firstByte;
-			return;
-		}
-		int i = 0;
-		for (int idx = 0; idx < vInt.length - 1; idx++) {
-			byte b = bytes[offset + 1 + idx];
-			i = i << 8;
-			i = i | (b & 0xFF);
-		}
-		vInt.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1) : i);
-	}
+    /**
+     * Reads a zero-compressed encoded int from a byte array and returns it.
+     * 
+     * @param bytes
+     *            the byte array
+     * @param offset
+     *            offset of the array to read from
+     * @param vInt
+     *            storing the deserialized int and its size in byte
+     */
+    public static void readVInt(byte[] bytes, int offset, VInt vInt) {
+        byte firstByte = bytes[offset];
+        vInt.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+        if (vInt.length == 1) {
+            vInt.value = firstByte;
+            return;
+        }
+        int i = 0;
+        for (int idx = 0; idx < vInt.length - 1; idx++) {
+            byte b = bytes[offset + 1 + idx];
+            i = i << 8;
+            i = i | (b & 0xFF);
+        }
+        vInt.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1) : i);
+    }
 
-	/**
-	 * Writes a zero-compressed encoded int to a byte array.
-	 * 
-	 * @param byteStream
-	 *            the byte array/stream
-	 * @param i
-	 *            the int
-	 */
-	public static void writeVInt(Output byteStream, int i) {
-		writeVLong(byteStream, i);
-	}
+    /**
+     * Writes a zero-compressed encoded int to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param i
+     *            the int
+     */
+    public static void writeVInt(Output byteStream, int i) {
+        writeVLong(byteStream, i);
+    }
 
-	/**
-	 * Write a zero-compressed encoded long to a byte array.
-	 * 
-	 * @param byteStream
-	 *            the byte array/stream
-	 * @param l
-	 *            the long
-	 */
-	public static void writeVLong(Output byteStream, long l) {
-		if (l >= -112 && l <= 127) {
-			byteStream.write((byte) l);
-			return;
-		}
+    /**
+     * Write a zero-compressed encoded long to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param l
+     *            the long
+     */
+    public static void writeVLong(Output byteStream, long l) {
+        if (l >= -112 && l <= 127) {
+            byteStream.write((byte) l);
+            return;
+        }
 
-		int len = -112;
-		if (l < 0) {
-			l ^= -1L; // take one's complement'
-			len = -120;
-		}
+        int len = -112;
+        if (l < 0) {
+            l ^= -1L; // take one's complement
+            len = -120;
+        }
 
-		long tmp = l;
-		while (tmp != 0) {
-			tmp = tmp >> 8;
-			len--;
-		}
+        long tmp = l;
+        while (tmp != 0) {
+            tmp = tmp >> 8;
+            len--;
+        }
 
-		byteStream.write((byte) len);
+        byteStream.write((byte) len);
 
-		len = (len < -120) ? -(len + 120) : -(len + 112);
+        len = (len < -120) ? -(len + 120) : -(len + 112);
 
-		for (int idx = len; idx != 0; idx--) {
-			int shiftbits = (idx - 1) * 8;
-			long mask = 0xFFL << shiftbits;
-			byteStream.write((byte) ((l & mask) >> shiftbits));
-		}
-	}
+        for (int idx = len; idx != 0; idx--) {
+            int shiftbits = (idx - 1) * 8;
+            long mask = 0xFFL << shiftbits;
+            byteStream.write((byte) ((l & mask) >> shiftbits));
+        }
+    }
 
-	static Map<TypeInfo, ObjectInspector> cachedLazyObjectInspector = new ConcurrentHashMap<TypeInfo, ObjectInspector>();
+    static Map<TypeInfo, ObjectInspector> cachedLazyObjectInspector = new ConcurrentHashMap<TypeInfo, ObjectInspector>();
 
-	/**
-	 * Returns the lazy binary object inspector that can be used to inspect an
-	 * lazy binary object of that typeInfo
-	 * 
-	 * For primitive types, we use the standard writable object inspector.
-	 */
-	public static ObjectInspector getLazyObjectInspectorFromTypeInfo(
-			TypeInfo typeInfo, boolean topLevel) {
-		if (typeInfo == null)
-			throw new IllegalStateException("illegal type null ");
-		ObjectInspector result = cachedLazyObjectInspector.get(typeInfo);
-		if (result == null) {
-			switch (typeInfo.getCategory()) {
-			case PRIMITIVE: {
-				result = PrimitiveObjectInspectorFactory
-						.getPrimitiveLazyObjectInspector(((PrimitiveTypeInfo) typeInfo)
-								.getPrimitiveCategory());
-				break;
-			}
-			case LIST: {
-				ObjectInspector elementObjectInspector = getLazyObjectInspectorFromTypeInfo(
-						((ListTypeInfo) typeInfo).getListElementTypeInfo(),
-						false);
-				result = LazyObjectInspectorFactory
-						.getLazyListObjectInspector(elementObjectInspector);
-				break;
-			}
-			case MAP: {
-				MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
-				ObjectInspector keyObjectInspector = getLazyObjectInspectorFromTypeInfo(
-						mapTypeInfo.getMapKeyTypeInfo(), false);
-				ObjectInspector valueObjectInspector = getLazyObjectInspectorFromTypeInfo(
-						mapTypeInfo.getMapValueTypeInfo(), false);
-				result = LazyObjectInspectorFactory.getLazyMapObjectInspector(
-						keyObjectInspector, valueObjectInspector);
-				break;
-			}
-			case STRUCT: {
-				StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
-				List<String> fieldNames = structTypeInfo
-						.getAllStructFieldNames();
-				List<TypeInfo> fieldTypeInfos = structTypeInfo
-						.getAllStructFieldTypeInfos();
-				List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(
-						fieldTypeInfos.size());
+    /**
+     * Returns the lazy binary object inspector that can be used to inspect a
+     * lazy binary object of that typeInfo.
+     * For primitive types, we use the standard writable object inspector.
+     */
+    public static ObjectInspector getLazyObjectInspectorFromTypeInfo(TypeInfo typeInfo, boolean topLevel) {
+        if (typeInfo == null) {
+            throw new IllegalStateException("illegal type: null");
+        }
+        ObjectInspector result = cachedLazyObjectInspector.get(typeInfo);
+        if (result == null) {
+            switch (typeInfo.getCategory()) {
+                case PRIMITIVE: {
+                    result = PrimitiveObjectInspectorFactory
+                            .getPrimitiveLazyObjectInspector(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
+                    break;
+                }
+                case LIST: {
+                    ObjectInspector elementObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            ((ListTypeInfo) typeInfo).getListElementTypeInfo(), false);
+                    result = LazyObjectInspectorFactory.getLazyListObjectInspector(elementObjectInspector);
+                    break;
+                }
+                case MAP: {
+                    MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
+                    ObjectInspector keyObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            mapTypeInfo.getMapKeyTypeInfo(), false);
+                    ObjectInspector valueObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            mapTypeInfo.getMapValueTypeInfo(), false);
+                    result = LazyObjectInspectorFactory.getLazyMapObjectInspector(keyObjectInspector,
+                            valueObjectInspector);
+                    break;
+                }
+                case STRUCT: {
+                    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
+                    List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
+                    List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
+                    List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fieldTypeInfos.size());
 
-				for (int i = 0; i < fieldTypeInfos.size(); i++) {
-					fieldObjectInspectors
-							.add(getLazyObjectInspectorFromTypeInfo(
-									fieldTypeInfos.get(i), false));
-				}
+                    for (int i = 0; i < fieldTypeInfos.size(); i++) {
+                        fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(fieldTypeInfos.get(i), false));
+                    }
 
-				// if it is top level then create columnar
-				if (topLevel)
-					result = LazyObjectInspectorFactory
-							.getLazyColumnarObjectInspector(fieldNames,
-									fieldObjectInspectors);
-				// if it is not top level then create struct
-				else
-					result = LazyObjectInspectorFactory
-							.getLazyStructObjectInspector(fieldNames,
-									fieldObjectInspectors);
+                    // if it is top level then create columnar, otherwise create struct
+                    if (topLevel) {
+                        result = LazyObjectInspectorFactory.getLazyColumnarObjectInspector(fieldNames,
+                                fieldObjectInspectors);
+                    } else {
+                        result = LazyObjectInspectorFactory.getLazyStructObjectInspector(fieldNames,
+                                fieldObjectInspectors);
+                    }
 
-				break;
-			}
-			default: {
-				result = null;
-			}
-			}
-			cachedLazyObjectInspector.put(typeInfo, result);
-		}
-		return result;
-	}
+                    break;
+                }
+                default: {
+                    // a null result would NPE in the ConcurrentHashMap put below; fail fast instead
+                    throw new RuntimeException("Unrecognized type category: " + typeInfo.getCategory());
+                }
+            }
+            cachedLazyObjectInspector.put(typeInfo, result);
+        }
+        return result;
+    }
 
-	/**
-	 * get top-level lazy object inspector
-	 * 
-	 * @param fieldNames
-	 * @param fieldTypeInfos
-	 * @return
-	 */
-	public static ObjectInspector getLazyObjectInspector(
-			List<String> fieldNames, List<TypeInfo> fieldTypeInfos) {
-		List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(
-				fieldTypeInfos.size());
-		for (int i = 0; i < fieldTypeInfos.size(); i++) {
-			fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(
-					fieldTypeInfos.get(i), false));
-		}
+    /**
+     * Get the top-level (columnar) lazy object inspector.
+     * 
+     * @param fieldNames
+     *            the top-level field names
+     * @param fieldTypeInfos
+     *            the type info of each field
+     * @return the lazy columnar object inspector for the row
+     */
+    public static ObjectInspector getLazyObjectInspector(List<String> fieldNames, List<TypeInfo> fieldTypeInfos) {
+        List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fieldTypeInfos.size());
+        for (int i = 0; i < fieldTypeInfos.size(); i++) {
+            fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(fieldTypeInfos.get(i), false));
+        }
 
-		return LazyObjectInspectorFactory.getLazyColumnarObjectInspector(
-				fieldNames, fieldObjectInspectors);
-	}
+        return LazyObjectInspectorFactory.getLazyColumnarObjectInspector(fieldNames, fieldObjectInspectors);
+    }
 
-	private LazyUtils() {
-		// prevent instantiation
-	}
+    private LazyUtils() {
+        // prevent instantiation
+    }
 
-	/**
-	 * Returns -1 if the first byte sequence is lexicographically less than the
-	 * second; returns +1 if the second byte sequence is lexicographically less
-	 * than the first; otherwise return 0.
-	 */
-	public static int compare(byte[] b1, int start1, int length1, byte[] b2,
-			int start2, int length2) {
+    /**
+     * Returns -1 if the first byte sequence is lexicographically less than the
+     * second; returns +1 if the second byte sequence is lexicographically less
+     * than the first; otherwise return 0.
+     */
+    public static int compare(byte[] b1, int start1, int length1, byte[] b2, int start2, int length2) {
 
-		int min = Math.min(length1, length2);
+        int min = Math.min(length1, length2);
 
-		for (int i = 0; i < min; i++) {
-			if (b1[start1 + i] == b2[start2 + i]) {
-				continue;
-			}
-			if (b1[start1 + i] < b2[start2 + i]) {
-				return -1;
-			} else {
-				return 1;
-			}
-		}
+        for (int i = 0; i < min; i++) {
+            if (b1[start1 + i] == b2[start2 + i]) {
+                continue;
+            }
+            if (b1[start1 + i] < b2[start2 + i]) {
+                return -1;
+            } else {
+                return 1;
+            }
+        }
 
-		if (length1 < length2) {
-			return -1;
-		}
-		if (length1 > length2) {
-			return 1;
-		}
-		return 0;
-	}
+        if (length1 < length2) {
+            return -1;
+        }
+        if (length1 > length2) {
+            return 1;
+        }
+        return 0;
+    }
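One caveat worth noting about compare: Java bytes are signed, so this order treats 0x80-0xFF as negative and sorts them before 0x00-0x7F, which is not the unsigned lexicographic order most byte-string comparators use. If unsigned order were wanted, each byte would be masked first, as in this sketch (an alternative, not what the code above does):

public class UnsignedCompareDemo {
    static int compareUnsigned(byte[] b1, byte[] b2) {
        int min = Math.min(b1.length, b2.length);
        for (int i = 0; i < min; i++) {
            int u1 = b1[i] & 0xFF; // mask to 0..255 before comparing
            int u2 = b2[i] & 0xFF;
            if (u1 != u2) {
                return u1 < u2 ? -1 : 1;
            }
        }
        return Integer.compare(b1.length, b2.length);
    }

    public static void main(String[] args) {
        byte[] a = { (byte) 0x80 };
        byte[] b = { 0x01 };
        System.out.println(compareUnsigned(a, b)); // 1: 0x80 > 0x01 unsigned
        // the signed compare above returns -1 for the same inputs
    }
}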
 
-	public static int hashBytes(byte[] data, int start, int len) {
-		int hash = 1;
-		for (int i = start; i < len; i++) {
-			hash = (31 * hash) + data[i];
-		}
-		return hash;
-	}
+    public static int hashBytes(byte[] data, int start, int len) {
+        int hash = 1;
+        for (int i = start; i < start + len; i++) { // hash bytes [start, start + len)
+            hash = (31 * hash) + data[i];
+        }
+        return hash;
+    }
 
-	/**
-	 * Writes a zero-compressed encoded int to a byte array.
-	 * 
-	 * @param byteStream
-	 *            the byte array/stream
-	 * @param i
-	 *            the int
-	 */
-	public static void writeVInt(DataOutput byteStream, int i)
-			throws IOException {
-		writeVLong(byteStream, i);
-	}
+    /**
+     * Writes a zero-compressed encoded int to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param i
+     *            the int
+     */
+    public static void writeVInt(DataOutput byteStream, int i) throws IOException {
+        writeVLong(byteStream, i);
+    }
 
-	/**
-	 * Write a zero-compressed encoded long to a byte array.
-	 * 
-	 * @param byteStream
-	 *            the byte array/stream
-	 * @param l
-	 *            the long
-	 */
-	public static void writeVLong(DataOutput byteStream, long l)
-			throws IOException {
-		if (l >= -112 && l <= 127) {
-			byteStream.write((byte) l);
-			return;
-		}
+    /**
+     * Write a zero-compressed encoded long to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param l
+     *            the long
+     */
+    public static void writeVLong(DataOutput byteStream, long l) throws IOException {
+        if (l >= -112 && l <= 127) {
+            byteStream.write((byte) l);
+            return;
+        }
 
-		int len = -112;
-		if (l < 0) {
-			l ^= -1L; // take one's complement'
-			len = -120;
-		}
+        int len = -112;
+        if (l < 0) {
+            l ^= -1L; // take one's complement
+            len = -120;
+        }
 
-		long tmp = l;
-		while (tmp != 0) {
-			tmp = tmp >> 8;
-			len--;
-		}
+        long tmp = l;
+        while (tmp != 0) {
+            tmp = tmp >> 8;
+            len--;
+        }
 
-		byteStream.write((byte) len);
+        byteStream.write((byte) len);
 
-		len = (len < -120) ? -(len + 120) : -(len + 112);
+        len = (len < -120) ? -(len + 120) : -(len + 112);
 
-		for (int idx = len; idx != 0; idx--) {
-			int shiftbits = (idx - 1) * 8;
-			long mask = 0xFFL << shiftbits;
-			byteStream.write((byte) ((l & mask) >> shiftbits));
-		}
-	}
+        for (int idx = len; idx != 0; idx--) {
+            int shiftbits = (idx - 1) * 8;
+            long mask = 0xFFL << shiftbits;
+            byteStream.write((byte) ((l & mask) >> shiftbits));
+        }
+    }
 }
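To make the zero-compressed encoding concrete: a long in -112..127 is stored as that single byte; anything else gets a first byte encoding sign and payload length (-113..-120 for positive, -121..-128 for negative after one's complement), followed by the magnitude big-endian. A worked round trip for l = 300 reusing the document's own write logic, with ByteArrayOutputStream standing in for the Output wrapper:

import java.io.ByteArrayOutputStream;

public class VLongDemo {
    static void writeVLong(ByteArrayOutputStream out, long l) {
        if (l >= -112 && l <= 127) {
            out.write((byte) l);
            return;
        }
        int len = -112;
        if (l < 0) {
            l ^= -1L; // take one's complement
            len = -120;
        }
        for (long tmp = l; tmp != 0; tmp = tmp >> 8) {
            len--; // one step per magnitude byte
        }
        out.write((byte) len);
        len = (len < -120) ? -(len + 120) : -(len + 112);
        for (int idx = len; idx != 0; idx--) {
            int shiftbits = (idx - 1) * 8;
            out.write((byte) ((l & (0xFFL << shiftbits)) >> shiftbits));
        }
    }

    public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeVLong(out, 300);
        byte[] b = out.toByteArray();
        // first byte -114 -> positive, -(-114 + 112) = 2 magnitude bytes
        System.out.printf("%d %02X %02X%n", b[0], b[1], b[2]); // -114 01 2C
        long decoded = ((b[1] & 0xFFL) << 8) | (b[2] & 0xFFL);
        System.out.println(decoded); // 300
    }
}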
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
index b20f185..b1ca622 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
@@ -31,50 +31,47 @@
  * 
  * @see LazyColumnar
  */
-public class LazyColumnarObjectInspector extends StandardStructObjectInspector
-		implements Serializable {
+public class LazyColumnarObjectInspector extends StandardStructObjectInspector implements Serializable {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	public LazyColumnarObjectInspector(List<String> structFieldNames,
-			List<ObjectInspector> structFieldObjectInspectors) {
-		super(structFieldNames, structFieldObjectInspectors);
-	}
+    public LazyColumnarObjectInspector(List<String> structFieldNames, List<ObjectInspector> structFieldObjectInspectors) {
+        super(structFieldNames, structFieldObjectInspectors);
+    }
 
-	public LazyColumnarObjectInspector(List<StructField> fields) {
-		super(fields);
-	}
+    public LazyColumnarObjectInspector(List<StructField> fields) {
+        super(fields);
+    }
 
-	@Override
-	public Object getStructFieldData(Object data, StructField fieldRef) {
-		if (data == null) {
-			return null;
-		}
-		LazyColumnar struct = (LazyColumnar) data;
-		MyField f = (MyField) fieldRef;
+    @Override
+    public Object getStructFieldData(Object data, StructField fieldRef) {
+        if (data == null) {
+            return null;
+        }
+        LazyColumnar struct = (LazyColumnar) data;
+        MyField f = (MyField) fieldRef;
 
-		int fieldID = f.getFieldID();
-		assert (fieldID >= 0 && fieldID < fields.size());
+        int fieldID = f.getFieldID();
+        assert (fieldID >= 0 && fieldID < fields.size());
 
-		Object column = struct.getField(fieldID);
-		return column;
-	}
+        Object column = struct.getField(fieldID);
+        return column;
+    }
 
-	@Override
-	public List<Object> getStructFieldsDataAsList(Object data) {
-		if (data == null) {
-			return null;
-		}
-		LazyColumnar struct = (LazyColumnar) data;
-		return struct.getFieldsAsList();
-	}
+    @Override
+    public List<Object> getStructFieldsDataAsList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyColumnar struct = (LazyColumnar) data;
+        return struct.getFieldsAsList();
+    }
 
-	public String toString() {
-		String str = "";
-		for (MyField f : fields) {
-			str += f.getFieldName() + ":"
-					+ f.getFieldObjectInspector().getTypeName() + "  ";
-		}
-		return str;
-	}
+    @Override
+    public String toString() {
+        StringBuilder str = new StringBuilder();
+        for (MyField f : fields) {
+            str.append(f.getFieldName()).append(":").append(f.getFieldObjectInspector().getTypeName()).append("  ");
+        }
+        return str.toString();
+    }
 }
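For context on how an inspector like this is consumed: callers never cast the lazy row themselves, they go through the StructObjectInspector contract, which is what lets getStructFieldData lazily pull a single column at a time. A hedged sketch against the Hive ObjectInspector API (the row/inspector pair would come from the SerDe; names here are illustrative):

import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class RowWalk {
    // row: e.g. a LazyColumnar instance; soi: its LazyColumnarObjectInspector
    static void dumpRow(Object row, StructObjectInspector soi) {
        List<? extends StructField> refs = soi.getAllStructFieldRefs();
        for (StructField f : refs) {
            // for this inspector, delegates to LazyColumnar.getField(fieldID)
            Object field = soi.getStructFieldData(row, f);
            System.out.println(f.getFieldName() + " = " + field);
        }
    }
}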
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
index dc4e85b..aaa5d66 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
@@ -29,34 +29,34 @@
  */
 public class LazyListObjectInspector extends StandardListObjectInspector {
 
-	protected LazyListObjectInspector(ObjectInspector listElementObjectInspector) {
-		super(listElementObjectInspector);
-	}
+    protected LazyListObjectInspector(ObjectInspector listElementObjectInspector) {
+        super(listElementObjectInspector);
+    }
 
-	@Override
-	public List<?> getList(Object data) {
-		if (data == null) {
-			return null;
-		}
-		LazyArray array = (LazyArray) data;
-		return array.getList();
-	}
+    @Override
+    public List<?> getList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getList();
+    }
 
-	@Override
-	public Object getListElement(Object data, int index) {
-		if (data == null) {
-			return null;
-		}
-		LazyArray array = (LazyArray) data;
-		return array.getListElementObject(index);
-	}
+    @Override
+    public Object getListElement(Object data, int index) {
+        if (data == null) {
+            return null;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getListElementObject(index);
+    }
 
-	@Override
-	public int getListLength(Object data) {
-		if (data == null) {
-			return -1;
-		}
-		LazyArray array = (LazyArray) data;
-		return array.getListLength();
-	}
+    @Override
+    public int getListLength(Object data) {
+        if (data == null) {
+            return -1;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getListLength();
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
index a3be142..1b0c412 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
@@ -31,32 +31,31 @@
  */
 public class LazyMapObjectInspector extends StandardMapObjectInspector {
 
-	protected LazyMapObjectInspector(ObjectInspector mapKeyObjectInspector,
-			ObjectInspector mapValueObjectInspector) {
-		super(mapKeyObjectInspector, mapValueObjectInspector);
-	}
+    protected LazyMapObjectInspector(ObjectInspector mapKeyObjectInspector, ObjectInspector mapValueObjectInspector) {
+        super(mapKeyObjectInspector, mapValueObjectInspector);
+    }
 
-	@Override
-	public Map<?, ?> getMap(Object data) {
-		if (data == null) {
-			return null;
-		}
-		return ((LazyMap) data).getMap();
-	}
+    @Override
+    public Map<?, ?> getMap(Object data) {
+        if (data == null) {
+            return null;
+        }
+        return ((LazyMap) data).getMap();
+    }
 
-	@Override
-	public int getMapSize(Object data) {
-		if (data == null) {
-			return -1;
-		}
-		return ((LazyMap) data).getMapSize();
-	}
+    @Override
+    public int getMapSize(Object data) {
+        if (data == null) {
+            return -1;
+        }
+        return ((LazyMap) data).getMapSize();
+    }
 
-	@Override
-	public Object getMapValueElement(Object data, Object key) {
-		if (data == null) {
-			return -1;
-		}
-		return ((LazyMap) data).getMapValueElement(key);
-	}
+    @Override
+    public Object getMapValueElement(Object data, Object key) {
+        if (data == null) {
+            return -1;
+        }
+        return ((LazyMap) data).getMapValueElement(key);
+    }
 }
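These inspectors follow a consistent sentinel convention for null data: object-valued methods (getMap, getList, element lookups) answer null, while int-valued ones (getMapSize, getListLength) answer -1, since an int cannot carry null. A caller-side sketch of null-safe access under that convention:

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;

public class MapAccess {
    static Object lookup(Object data, Object key, MapObjectInspector moi) {
        int size = moi.getMapSize(data);
        if (size <= 0) { // -1 signals null data; 0 an empty map
            return null;
        }
        return moi.getMapValueElement(data, key);
    }
}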
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
index 439b130..8093c94 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
@@ -9,10 +9,8 @@
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
  * The reason of having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same ObjectInspector.
@@ -20,74 +18,65 @@
 
 public final class LazyObjectInspectorFactory {
 
-	static ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector> cachedLazyColumnarObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector>();
+    static ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector> cachedLazyColumnarObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector>();
 
-	static ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector> cachedLazyStructObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector>();
+    static ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector> cachedLazyStructObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector>();
 
-	static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazyListObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
+    static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazyListObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
 
-	static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazyMapObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
+    static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazyMapObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
 
-	public static LazyColumnarObjectInspector getLazyColumnarObjectInspector(
-			List<String> structFieldNames,
-			List<ObjectInspector> structFieldObjectInspectors) {
-		ArrayList<Object> signature = new ArrayList<Object>();
-		signature.add(structFieldNames);
-		signature.add(structFieldObjectInspectors);
-		LazyColumnarObjectInspector result = cachedLazyColumnarObjectInspector
-				.get(signature);
-		if (result == null) {
-			result = new LazyColumnarObjectInspector(structFieldNames,
-					structFieldObjectInspectors);
-			cachedLazyColumnarObjectInspector.put(signature, result);
-		}
-		return result;
-	}
+    public static LazyColumnarObjectInspector getLazyColumnarObjectInspector(List<String> structFieldNames,
+            List<ObjectInspector> structFieldObjectInspectors) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(structFieldNames);
+        signature.add(structFieldObjectInspectors);
+        LazyColumnarObjectInspector result = cachedLazyColumnarObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyColumnarObjectInspector(structFieldNames, structFieldObjectInspectors);
+            cachedLazyColumnarObjectInspector.put(signature, result);
+        }
+        return result;
+    }
 
-	public static LazyStructObjectInspector getLazyStructObjectInspector(
-			List<String> structFieldNames,
-			List<ObjectInspector> structFieldObjectInspectors) {
-		ArrayList<Object> signature = new ArrayList<Object>();
-		signature.add(structFieldNames);
-		signature.add(structFieldObjectInspectors);
-		LazyStructObjectInspector result = cachedLazyStructObjectInspector
-				.get(signature);
-		if (result == null) {
-			result = new LazyStructObjectInspector(structFieldNames,
-					structFieldObjectInspectors);
-			cachedLazyStructObjectInspector.put(signature, result);
-		}
-		return result;
-	}
+    public static LazyStructObjectInspector getLazyStructObjectInspector(List<String> structFieldNames,
+            List<ObjectInspector> structFieldObjectInspectors) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(structFieldNames);
+        signature.add(structFieldObjectInspectors);
+        LazyStructObjectInspector result = cachedLazyStructObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyStructObjectInspector(structFieldNames, structFieldObjectInspectors);
+            cachedLazyStructObjectInspector.put(signature, result);
+        }
+        return result;
+    }
 
-	public static LazyListObjectInspector getLazyListObjectInspector(
-			ObjectInspector listElementInspector) {
-		ArrayList<Object> signature = new ArrayList<Object>();
-		signature.add(listElementInspector);
-		LazyListObjectInspector result = cachedLazyListObjectInspector
-				.get(signature);
-		if (result == null) {
-			result = new LazyListObjectInspector(listElementInspector);
-			cachedLazyListObjectInspector.put(signature, result);
-		}
-		return result;
-	}
+    public static LazyListObjectInspector getLazyListObjectInspector(ObjectInspector listElementInspector) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(listElementInspector);
+        LazyListObjectInspector result = cachedLazyListObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyListObjectInspector(listElementInspector);
+            cachedLazyListObjectInspector.put(signature, result);
+        }
+        return result;
+    }
 
-	public static LazyMapObjectInspector getLazyMapObjectInspector(
-			ObjectInspector keyInspector, ObjectInspector valueInspector) {
-		ArrayList<Object> signature = new ArrayList<Object>();
-		signature.add(keyInspector);
-		signature.add(valueInspector);
-		LazyMapObjectInspector result = cachedLazyMapObjectInspector
-				.get(signature);
-		if (result == null) {
-			result = new LazyMapObjectInspector(keyInspector, valueInspector);
-			cachedLazyMapObjectInspector.put(signature, result);
-		}
-		return result;
-	}
+    public static LazyMapObjectInspector getLazyMapObjectInspector(ObjectInspector keyInspector,
+            ObjectInspector valueInspector) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(keyInspector);
+        signature.add(valueInspector);
+        LazyMapObjectInspector result = cachedLazyMapObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyMapObjectInspector(keyInspector, valueInspector);
+            cachedLazyMapObjectInspector.put(signature, result);
+        }
+        return result;
+    }
 
-	private LazyObjectInspectorFactory() {
-		// prevent instantiation
-	}
+    private LazyObjectInspectorFactory() {
+        // prevent instantiation
+    }
 }
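A note on the pattern above: each getter implements a get-or-create cache whose key is an ArrayList holding the full set of construction parameters; since ArrayList compares element-wise, structurally identical inspectors end up shared. A minimal standalone sketch of the same idiom (class and method names here are illustrative, not part of hivesterix):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;

    public final class SignatureCachedFactory {
        // Cache keyed by the complete list of construction parameters;
        // ArrayList implements equals/hashCode element-wise, so equal
        // parameter lists hit the same entry.
        private static final ConcurrentHashMap<List<Object>, Object> CACHE =
                new ConcurrentHashMap<List<Object>, Object>();

        public static Object getInstance(String name, boolean escaped) {
            ArrayList<Object> signature = new ArrayList<Object>();
            signature.add(name);
            signature.add(Boolean.valueOf(escaped));
            Object result = CACHE.get(signature);
            if (result == null) {
                // Stand-in for an expensive inspector constructor.
                result = name + (escaped ? ":escaped" : "");
                CACHE.put(signature, result);
            }
            return result;
        }

        private SignatureCachedFactory() {
            // prevent instantiation
        }
    }

The check-then-put is not atomic, so two racing threads may both construct an instance; that is benign for stateless inspectors, though putIfAbsent would make the cache strictly single-instance.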
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
index 1a50233..ad70d4c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
@@ -32,35 +32,34 @@
  */
 public class LazyStructObjectInspector extends StandardStructObjectInspector {
 
-	protected LazyStructObjectInspector(List<String> structFieldNames,
-			List<ObjectInspector> structFieldObjectInspectors) {
-		super(structFieldNames, structFieldObjectInspectors);
-	}
+    protected LazyStructObjectInspector(List<String> structFieldNames, List<ObjectInspector> structFieldObjectInspectors) {
+        super(structFieldNames, structFieldObjectInspectors);
+    }
 
-	protected LazyStructObjectInspector(List<StructField> fields) {
-		super(fields);
-	}
+    protected LazyStructObjectInspector(List<StructField> fields) {
+        super(fields);
+    }
 
-	@Override
-	public Object getStructFieldData(Object data, StructField fieldRef) {
-		if (data == null) {
-			return null;
-		}
-		LazyStruct struct = (LazyStruct) data;
-		MyField f = (MyField) fieldRef;
+    @Override
+    public Object getStructFieldData(Object data, StructField fieldRef) {
+        if (data == null) {
+            return null;
+        }
+        LazyStruct struct = (LazyStruct) data;
+        MyField f = (MyField) fieldRef;
 
-		int fieldID = f.getFieldID();
-		assert (fieldID >= 0 && fieldID < fields.size());
+        int fieldID = f.getFieldID();
+        assert (fieldID >= 0 && fieldID < fields.size());
 
-		return struct.getField(fieldID);
-	}
+        return struct.getField(fieldID);
+    }
 
-	@Override
-	public List<Object> getStructFieldsDataAsList(Object data) {
-		if (data == null) {
-			return null;
-		}
-		LazyStruct struct = (LazyStruct) data;
-		return struct.getFieldsAsList();
-	}
+    @Override
+    public List<Object> getStructFieldsDataAsList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyStruct struct = (LazyStruct) data;
+        return struct.getFieldsAsList();
+    }
 }
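For orientation, a hedged usage sketch of the struct-inspection flow this class serves; it relies only on the standard Hive serde2 StructObjectInspector contract that LazyStructObjectInspector inherits through StandardStructObjectInspector:

    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    public final class StructAccessSketch {
        // Resolve the field handle by name once, then reuse it per row;
        // a null row yields null, matching getStructFieldData above.
        public static Object readField(StructObjectInspector soi, Object row, String fieldName) {
            StructField ref = soi.getStructFieldRef(fieldName);
            return soi.getStructFieldData(row, ref);
        }

        private StructAccessSketch() {
        }
    }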
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
index 7ef8bdd..eaa2bbc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
@@ -26,24 +26,23 @@
 /**
  * An AbstractPrimitiveLazyObjectInspector for a LazyPrimitive object.
  */
-public abstract class AbstractPrimitiveLazyObjectInspector<T extends Writable>
-		extends AbstractPrimitiveObjectInspector {
+public abstract class AbstractPrimitiveLazyObjectInspector<T extends Writable> extends AbstractPrimitiveObjectInspector {
 
-	protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
-		super(typeEntry);
-	}
+    protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
+        super(typeEntry);
+    }
 
-	@SuppressWarnings("unchecked")
-	@Override
-	public T getPrimitiveWritableObject(Object o) {
-		if (o == null)
-			System.out.println("sth. wrong");
-		return o == null ? null : ((LazyPrimitive<?, T>) o).getWritableObject();
-	}
+    @SuppressWarnings("unchecked")
+    @Override
+    public T getPrimitiveWritableObject(Object o) {
+        if (o == null)
+            System.out.println("sth. wrong");
+        return o == null ? null : ((LazyPrimitive<?, T>) o).getWritableObject();
+    }
 
-	@Override
-	public boolean preferWritable() {
-		return true;
-	}
+    @Override
+    public boolean preferWritable() {
+        return true;
+    }
 
 }
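A short sketch of the calling convention preferWritable() advertises, assuming only the standard Hive PrimitiveObjectInspector interface: when an inspector prefers writables, callers take the Writable path and skip the boxing that getPrimitiveJavaObject performs.

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

    public final class PreferWritableSketch {
        // Pick the cheaper representation the inspector advertises.
        public static Object extract(PrimitiveObjectInspector poi, Object lazyValue) {
            return poi.preferWritable()
                    ? poi.getPrimitiveWritableObject(lazyValue) // raw Writable, no boxing
                    : poi.getPrimitiveJavaObject(lazyValue);    // boxed Java object
        }

        private PreferWritableSketch() {
        }
    }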
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
index 472dce0..7927c1e 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableBooleanObjectInspector inspects a BooleanWritable Object.
  */
-public class LazyBooleanObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<BooleanWritable> implements
-		BooleanObjectInspector {
+public class LazyBooleanObjectInspector extends AbstractPrimitiveLazyObjectInspector<BooleanWritable> implements
+        BooleanObjectInspector {
 
-	LazyBooleanObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.booleanTypeEntry);
-	}
+    LazyBooleanObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.booleanTypeEntry);
+    }
 
-	@Override
-	public boolean get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public boolean get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyBoolean((LazyBoolean) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyBoolean((LazyBoolean) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Boolean.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Boolean.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
index e631fc7..10a881c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableByteObjectInspector inspects a ByteWritable Object.
  */
-public class LazyByteObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<ByteWritable> implements
-		ByteObjectInspector {
+public class LazyByteObjectInspector extends AbstractPrimitiveLazyObjectInspector<ByteWritable> implements
+        ByteObjectInspector {
 
-	LazyByteObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.byteTypeEntry);
-	}
+    LazyByteObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.byteTypeEntry);
+    }
 
-	@Override
-	public byte get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public byte get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyByte((LazyByte) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyByte((LazyByte) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Byte.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Byte.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
index 1257f11..9f98b56 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableDoubleObjectInspector inspects a DoubleWritable Object.
  */
-public class LazyDoubleObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<DoubleWritable> implements
-		DoubleObjectInspector {
+public class LazyDoubleObjectInspector extends AbstractPrimitiveLazyObjectInspector<DoubleWritable> implements
+        DoubleObjectInspector {
 
-	LazyDoubleObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.doubleTypeEntry);
-	}
+    LazyDoubleObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.doubleTypeEntry);
+    }
 
-	@Override
-	public double get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public double get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyDouble((LazyDouble) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyDouble((LazyDouble) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Double.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Double.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
index c66a06f..bf3e9a2 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A FloatObjectInspector inspects a FloatWritable Object.
  */
-public class LazyFloatObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<FloatWritable> implements
-		FloatObjectInspector {
+public class LazyFloatObjectInspector extends AbstractPrimitiveLazyObjectInspector<FloatWritable> implements
+        FloatObjectInspector {
 
-	LazyFloatObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.floatTypeEntry);
-	}
+    LazyFloatObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.floatTypeEntry);
+    }
 
-	@Override
-	public float get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public float get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyFloat((LazyFloat) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyFloat((LazyFloat) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Float.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Float.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
index b2159e0..87bcb0d 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableIntObjectInspector inspects an IntWritable Object.
  */
-public class LazyIntObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<IntWritable> implements
-		IntObjectInspector {
+public class LazyIntObjectInspector extends AbstractPrimitiveLazyObjectInspector<IntWritable> implements
+        IntObjectInspector {
 
-	LazyIntObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.intTypeEntry);
-	}
+    LazyIntObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.intTypeEntry);
+    }
 
-	@Override
-	public int get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public int get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyInteger((LazyInteger) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyInteger((LazyInteger) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Integer.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Integer.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
index 1fc2d53..06b5d3c 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableLongObjectInspector inspects a LongWritable Object.
  */
-public class LazyLongObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<LongWritable> implements
-		LongObjectInspector {
+public class LazyLongObjectInspector extends AbstractPrimitiveLazyObjectInspector<LongWritable> implements
+        LongObjectInspector {
 
-	LazyLongObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.longTypeEntry);
-	}
+    LazyLongObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.longTypeEntry);
+    }
 
-	@Override
-	public long get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public long get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyLong((LazyLong) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyLong((LazyLong) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Long.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Long.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
index 134dc5a..5d7ef48 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
@@ -26,10 +26,8 @@
 /**
  * LazyPrimitiveObjectInspectorFactory is the primary way to create new
  * ObjectInspector instances.
- * 
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
  * The reason for having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same
@@ -37,63 +35,58 @@
  */
 public final class LazyPrimitiveObjectInspectorFactory {
 
-	public static final LazyBooleanObjectInspector LAZY_BOOLEAN_OBJECT_INSPECTOR = new LazyBooleanObjectInspector();
-	public static final LazyByteObjectInspector LAZY_BYTE_OBJECT_INSPECTOR = new LazyByteObjectInspector();
-	public static final LazyShortObjectInspector LAZY_SHORT_OBJECT_INSPECTOR = new LazyShortObjectInspector();
-	public static final LazyIntObjectInspector LAZY_INT_OBJECT_INSPECTOR = new LazyIntObjectInspector();
-	public static final LazyLongObjectInspector LAZY_LONG_OBJECT_INSPECTOR = new LazyLongObjectInspector();
-	public static final LazyFloatObjectInspector LAZY_FLOAT_OBJECT_INSPECTOR = new LazyFloatObjectInspector();
-	public static final LazyDoubleObjectInspector LAZY_DOUBLE_OBJECT_INSPECTOR = new LazyDoubleObjectInspector();
-	public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR = new LazyVoidObjectInspector();
+    public static final LazyBooleanObjectInspector LAZY_BOOLEAN_OBJECT_INSPECTOR = new LazyBooleanObjectInspector();
+    public static final LazyByteObjectInspector LAZY_BYTE_OBJECT_INSPECTOR = new LazyByteObjectInspector();
+    public static final LazyShortObjectInspector LAZY_SHORT_OBJECT_INSPECTOR = new LazyShortObjectInspector();
+    public static final LazyIntObjectInspector LAZY_INT_OBJECT_INSPECTOR = new LazyIntObjectInspector();
+    public static final LazyLongObjectInspector LAZY_LONG_OBJECT_INSPECTOR = new LazyLongObjectInspector();
+    public static final LazyFloatObjectInspector LAZY_FLOAT_OBJECT_INSPECTOR = new LazyFloatObjectInspector();
+    public static final LazyDoubleObjectInspector LAZY_DOUBLE_OBJECT_INSPECTOR = new LazyDoubleObjectInspector();
+    public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR = new LazyVoidObjectInspector();
 
-	static ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector>();
+    static ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector>();
 
-	public static LazyStringObjectInspector getLazyStringObjectInspector(
-			boolean escaped, byte escapeChar) {
-		ArrayList<Object> signature = new ArrayList<Object>();
-		signature.add(Boolean.valueOf(escaped));
-		signature.add(Byte.valueOf(escapeChar));
-		LazyStringObjectInspector result = cachedLazyStringObjectInspector
-				.get(signature);
-		if (result == null) {
-			result = new LazyStringObjectInspector(escaped, escapeChar);
-			cachedLazyStringObjectInspector.put(signature, result);
-		}
-		return result;
-	}
+    public static LazyStringObjectInspector getLazyStringObjectInspector(boolean escaped, byte escapeChar) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
+        LazyStringObjectInspector result = cachedLazyStringObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyStringObjectInspector(escaped, escapeChar);
+            cachedLazyStringObjectInspector.put(signature, result);
+        }
+        return result;
+    }
 
-	public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(
-			PrimitiveCategory primitiveCategory, boolean escaped,
-			byte escapeChar) {
+    public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(PrimitiveCategory primitiveCategory,
+            boolean escaped, byte escapeChar) {
 
-		switch (primitiveCategory) {
-		case BOOLEAN:
-			return LAZY_BOOLEAN_OBJECT_INSPECTOR;
-		case BYTE:
-			return LAZY_BYTE_OBJECT_INSPECTOR;
-		case SHORT:
-			return LAZY_SHORT_OBJECT_INSPECTOR;
-		case INT:
-			return LAZY_INT_OBJECT_INSPECTOR;
-		case LONG:
-			return LAZY_LONG_OBJECT_INSPECTOR;
-		case FLOAT:
-			return LAZY_FLOAT_OBJECT_INSPECTOR;
-		case DOUBLE:
-			return LAZY_DOUBLE_OBJECT_INSPECTOR;
-		case STRING:
-			return getLazyStringObjectInspector(escaped, escapeChar);
-		case VOID:
-			return LAZY_VOID_OBJECT_INSPECTOR;
-		default:
-			throw new RuntimeException(
-					"Internal error: Cannot find ObjectInspector " + " for "
-							+ primitiveCategory);
-		}
-	}
+        switch (primitiveCategory) {
+            case BOOLEAN:
+                return LAZY_BOOLEAN_OBJECT_INSPECTOR;
+            case BYTE:
+                return LAZY_BYTE_OBJECT_INSPECTOR;
+            case SHORT:
+                return LAZY_SHORT_OBJECT_INSPECTOR;
+            case INT:
+                return LAZY_INT_OBJECT_INSPECTOR;
+            case LONG:
+                return LAZY_LONG_OBJECT_INSPECTOR;
+            case FLOAT:
+                return LAZY_FLOAT_OBJECT_INSPECTOR;
+            case DOUBLE:
+                return LAZY_DOUBLE_OBJECT_INSPECTOR;
+            case STRING:
+                return getLazyStringObjectInspector(escaped, escapeChar);
+            case VOID:
+                return LAZY_VOID_OBJECT_INSPECTOR;
+            default:
+                throw new RuntimeException("Internal error: Cannot find ObjectInspector " + " for " + primitiveCategory);
+        }
+    }
 
-	private LazyPrimitiveObjectInspectorFactory() {
-		// prevent instantiation
-	}
+    private LazyPrimitiveObjectInspectorFactory() {
+        // prevent instantiation
+    }
 
 }
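A hedged usage sketch for the factory above; the class and signatures come from this patch, and PrimitiveCategory is Hive's serde2 enum. Because string inspectors are cached by (escaped, escapeChar), repeated calls with equal arguments return the same instance:

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;

    public final class FactoryUsageSketch {
        public static void main(String[] args) {
            // Escaped string column using '\' as the escape character.
            AbstractPrimitiveLazyObjectInspector<?> first = LazyPrimitiveObjectInspectorFactory
                    .getLazyObjectInspector(PrimitiveCategory.STRING, true, (byte) '\\');
            AbstractPrimitiveLazyObjectInspector<?> second = LazyPrimitiveObjectInspectorFactory
                    .getLazyObjectInspector(PrimitiveCategory.STRING, true, (byte) '\\');
            // Equal parameters hit the same cache entry.
            System.out.println(first == second); // expected: true
        }
    }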
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
index cb06dfd..b02d9bc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
@@ -26,26 +26,25 @@
 /**
  * A WritableShortObjectInspector inspects a ShortWritable Object.
  */
-public class LazyShortObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<ShortWritable> implements
-		ShortObjectInspector {
+public class LazyShortObjectInspector extends AbstractPrimitiveLazyObjectInspector<ShortWritable> implements
+        ShortObjectInspector {
 
-	LazyShortObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.shortTypeEntry);
-	}
+    LazyShortObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.shortTypeEntry);
+    }
 
-	@Override
-	public short get(Object o) {
-		return getPrimitiveWritableObject(o).get();
-	}
+    @Override
+    public short get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyShort((LazyShort) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyShort((LazyShort) o);
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		return o == null ? null : Short.valueOf(get(o));
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Short.valueOf(get(o));
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
index 5832f34..4d649dc 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
@@ -26,41 +26,39 @@
 /**
  * A WritableStringObjectInspector inspects a Text Object.
  */
-public class LazyStringObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<Text> implements
-		StringObjectInspector {
+public class LazyStringObjectInspector extends AbstractPrimitiveLazyObjectInspector<Text> implements
+        StringObjectInspector {
 
-	boolean escaped;
-	byte escapeChar;
+    boolean escaped;
+    byte escapeChar;
 
-	LazyStringObjectInspector(boolean escaped, byte escapeChar) {
-		super(PrimitiveObjectInspectorUtils.stringTypeEntry);
-		this.escaped = escaped;
-		this.escapeChar = escapeChar;
-	}
+    LazyStringObjectInspector(boolean escaped, byte escapeChar) {
+        super(PrimitiveObjectInspectorUtils.stringTypeEntry);
+        this.escaped = escaped;
+        this.escapeChar = escapeChar;
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o == null ? null : new LazyString((LazyString) o);
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyString((LazyString) o);
+    }
 
-	@Override
-	public Text getPrimitiveWritableObject(Object o) {
-		return o == null ? null : ((LazyString) o).getWritableObject();
-	}
+    @Override
+    public Text getPrimitiveWritableObject(Object o) {
+        return o == null ? null : ((LazyString) o).getWritableObject();
+    }
 
-	@Override
-	public String getPrimitiveJavaObject(Object o) {
-		return o == null ? null : ((LazyString) o).getWritableObject()
-				.toString();
-	}
+    @Override
+    public String getPrimitiveJavaObject(Object o) {
+        return o == null ? null : ((LazyString) o).getWritableObject().toString();
+    }
 
-	public boolean isEscaped() {
-		return escaped;
-	}
+    public boolean isEscaped() {
+        return escaped;
+    }
 
-	public byte getEscapeChar() {
-		return escapeChar;
-	}
+    public byte getEscapeChar() {
+        return escapeChar;
+    }
 
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
index a30f1af..c916191 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
@@ -24,21 +24,20 @@
 /**
  * A WritableVoidObjectInspector inspects a NullWritable Object.
  */
-public class LazyVoidObjectInspector extends
-		AbstractPrimitiveLazyObjectInspector<NullWritable> implements
-		VoidObjectInspector {
+public class LazyVoidObjectInspector extends AbstractPrimitiveLazyObjectInspector<NullWritable> implements
+        VoidObjectInspector {
 
-	LazyVoidObjectInspector() {
-		super(PrimitiveObjectInspectorUtils.voidTypeEntry);
-	}
+    LazyVoidObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.voidTypeEntry);
+    }
 
-	@Override
-	public Object copyObject(Object o) {
-		return o;
-	}
+    @Override
+    public Object copyObject(Object o) {
+        return o;
+    }
 
-	@Override
-	public Object getPrimitiveJavaObject(Object o) {
-		throw new RuntimeException("Internal error: cannot create Void object.");
-	}
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        throw new RuntimeException("Internal error: cannot create Void object.");
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index e70bdb9..33f0e51 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -24,7 +24,6 @@
 /**
  * PrimitiveObjectInspectorFactory is the primary way to create new
  * PrimitiveObjectInspector instances.
- * 
  * The reason for having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same
@@ -32,58 +31,46 @@
  */
 public final class PrimitiveObjectInspectorFactory {
 
-	public static final LazyBooleanObjectInspector LazyBooleanObjectInspector = new LazyBooleanObjectInspector();
-	public static final LazyByteObjectInspector LazyByteObjectInspector = new LazyByteObjectInspector();
-	public static final LazyShortObjectInspector LazyShortObjectInspector = new LazyShortObjectInspector();
-	public static final LazyIntObjectInspector LazyIntObjectInspector = new LazyIntObjectInspector();
-	public static final LazyLongObjectInspector LazyLongObjectInspector = new LazyLongObjectInspector();
-	public static final LazyFloatObjectInspector LazyFloatObjectInspector = new LazyFloatObjectInspector();
-	public static final LazyDoubleObjectInspector LazyDoubleObjectInspector = new LazyDoubleObjectInspector();
-	public static final LazyStringObjectInspector LazyStringObjectInspector = new LazyStringObjectInspector(
-			false, (byte) '\\');
-	public static final LazyVoidObjectInspector LazyVoidObjectInspector = new LazyVoidObjectInspector();
+    public static final LazyBooleanObjectInspector LazyBooleanObjectInspector = new LazyBooleanObjectInspector();
+    public static final LazyByteObjectInspector LazyByteObjectInspector = new LazyByteObjectInspector();
+    public static final LazyShortObjectInspector LazyShortObjectInspector = new LazyShortObjectInspector();
+    public static final LazyIntObjectInspector LazyIntObjectInspector = new LazyIntObjectInspector();
+    public static final LazyLongObjectInspector LazyLongObjectInspector = new LazyLongObjectInspector();
+    public static final LazyFloatObjectInspector LazyFloatObjectInspector = new LazyFloatObjectInspector();
+    public static final LazyDoubleObjectInspector LazyDoubleObjectInspector = new LazyDoubleObjectInspector();
+    public static final LazyStringObjectInspector LazyStringObjectInspector = new LazyStringObjectInspector(false,
+            (byte) '\\');
+    public static final LazyVoidObjectInspector LazyVoidObjectInspector = new LazyVoidObjectInspector();
 
-	private static HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>> cachedPrimitiveLazyInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>>();
+    private static HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>> cachedPrimitiveLazyInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>>();
 
-	static {
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BOOLEAN,
-				LazyBooleanObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BYTE,
-				LazyByteObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.SHORT,
-				LazyShortObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.INT,
-				LazyIntObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.LONG,
-				LazyLongObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.FLOAT,
-				LazyFloatObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.DOUBLE,
-				LazyDoubleObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.STRING,
-				LazyStringObjectInspector);
-		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.VOID,
-				LazyVoidObjectInspector);
-	}
+    static {
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BOOLEAN, LazyBooleanObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BYTE, LazyByteObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.SHORT, LazyShortObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.INT, LazyIntObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.LONG, LazyLongObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.FLOAT, LazyFloatObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.DOUBLE, LazyDoubleObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.STRING, LazyStringObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.VOID, LazyVoidObjectInspector);
+    }
 
-	/**
-	 * Returns the PrimitiveWritableObjectInspector for the PrimitiveCategory.
-	 * 
-	 * @param primitiveCategory
-	 */
-	public static AbstractPrimitiveLazyObjectInspector<?> getPrimitiveLazyObjectInspector(
-			PrimitiveCategory primitiveCategory) {
-		AbstractPrimitiveLazyObjectInspector<?> result = cachedPrimitiveLazyInspectorCache
-				.get(primitiveCategory);
-		if (result == null) {
-			throw new RuntimeException(
-					"Internal error: Cannot find ObjectInspector " + " for "
-							+ primitiveCategory);
-		}
-		return result;
-	}
+    /**
+     * Returns the PrimitiveWritableObjectInspector for the PrimitiveCategory.
+     * 
+     * @param primitiveCategory
+     */
+    public static AbstractPrimitiveLazyObjectInspector<?> getPrimitiveLazyObjectInspector(
+            PrimitiveCategory primitiveCategory) {
+        AbstractPrimitiveLazyObjectInspector<?> result = cachedPrimitiveLazyInspectorCache.get(primitiveCategory);
+        if (result == null) {
+            throw new RuntimeException("Internal error: Cannot find ObjectInspector for " + primitiveCategory);
+        }
+        return result;
+    }
 
-	private PrimitiveObjectInspectorFactory() {
-		// prevent instantiation
-	}
+    private PrimitiveObjectInspectorFactory() {
+        // prevent instantiation
+    }
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
index aeea68f..7830c52 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
@@ -5,13 +5,12 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public interface IHiveParser {
-	/**
-	 * parse one hive rwo into
-	 * 
-	 * @param row
-	 * @param objectInspector
-	 * @param tb
-	 */
-	public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb)
-			throws IOException;
+    /**
+     * parse one hive row into a binary tuple.
+     * 
+     * @param data
+     * @param start
+     * @param length
+     * @param tb
+     */
+    public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb) throws IOException;
 }
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
index 3aeb058..38e1b36 100644
--- a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
@@ -19,166 +19,156 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public class TextToBinaryTupleParser implements IHiveParser {
-	private int[] invertedIndex;
-	private int[] fieldEnds;
-	private int lastNecessaryFieldIndex;
-	private LazySimpleStructObjectInspector inputObjectInspector;
-	private List<? extends StructField> fieldRefs;
+    private int[] invertedIndex;
+    private int[] fieldEnds;
+    private int lastNecessaryFieldIndex;
+    private LazySimpleStructObjectInspector inputObjectInspector;
+    private List<? extends StructField> fieldRefs;
 
-	public TextToBinaryTupleParser(int[] outputColumnsOffset,
-			ObjectInspector structInspector) {
-		int size = 0;
-		for (int i = 0; i < outputColumnsOffset.length; i++)
-			if (outputColumnsOffset[i] >= 0)
-				size++;
-		invertedIndex = new int[size];
-		for (int i = 0; i < outputColumnsOffset.length; i++)
-			if (outputColumnsOffset[i] >= 0) {
-				invertedIndex[outputColumnsOffset[i]] = i;
-				lastNecessaryFieldIndex = i;
-			}
-		fieldEnds = new int[outputColumnsOffset.length];
-		for (int i = 0; i < fieldEnds.length; i++)
-			fieldEnds[i] = 0;
-		inputObjectInspector = (LazySimpleStructObjectInspector) structInspector;
-		fieldRefs = inputObjectInspector.getAllStructFieldRefs();
-	}
+    public TextToBinaryTupleParser(int[] outputColumnsOffset, ObjectInspector structInspector) {
+        int size = 0;
+        for (int i = 0; i < outputColumnsOffset.length; i++)
+            if (outputColumnsOffset[i] >= 0)
+                size++;
+        invertedIndex = new int[size];
+        for (int i = 0; i < outputColumnsOffset.length; i++)
+            if (outputColumnsOffset[i] >= 0) {
+                invertedIndex[outputColumnsOffset[i]] = i;
+                lastNecessaryFieldIndex = i;
+            }
+        fieldEnds = new int[outputColumnsOffset.length];
+        for (int i = 0; i < fieldEnds.length; i++)
+            fieldEnds[i] = 0;
+        inputObjectInspector = (LazySimpleStructObjectInspector) structInspector;
+        fieldRefs = inputObjectInspector.getAllStructFieldRefs();
+    }
 
-	@Override
-	public void parse(byte[] bytes, int start, int length, ArrayTupleBuilder tb)
-			throws IOException {
-		byte separator = inputObjectInspector.getSeparator();
-		boolean lastColumnTakesRest = inputObjectInspector
-				.getLastColumnTakesRest();
-		boolean isEscaped = inputObjectInspector.isEscaped();
-		byte escapeChar = inputObjectInspector.getEscapeChar();
-		DataOutput output = tb.getDataOutput();
+    @Override
+    public void parse(byte[] bytes, int start, int length, ArrayTupleBuilder tb) throws IOException {
+        byte separator = inputObjectInspector.getSeparator();
+        boolean lastColumnTakesRest = inputObjectInspector.getLastColumnTakesRest();
+        boolean isEscaped = inputObjectInspector.isEscaped();
+        byte escapeChar = inputObjectInspector.getEscapeChar();
+        DataOutput output = tb.getDataOutput();
 
-		int structByteEnd = start + length - 1;
-		int fieldId = 0;
-		int fieldByteEnd = start;
+        int structByteEnd = start + length - 1;
+        int fieldId = 0;
+        int fieldByteEnd = start;
 
-		// Go through all bytes in the byte[]
-		while (fieldByteEnd <= structByteEnd
-				&& fieldId <= lastNecessaryFieldIndex) {
-			if (fieldByteEnd == structByteEnd
-					|| bytes[fieldByteEnd] == separator) {
-				// Reached the end of a field?
-				if (lastColumnTakesRest && fieldId == fieldEnds.length - 1) {
-					fieldByteEnd = structByteEnd;
-				}
-				fieldEnds[fieldId] = fieldByteEnd;
-				if (fieldId == fieldEnds.length - 1
-						|| fieldByteEnd == structByteEnd) {
-					// for the case of null fields
-					for (int i = fieldId; i < fieldEnds.length; i++) {
-						fieldEnds[i] = fieldByteEnd;
-					}
-					break;
-				}
-				fieldByteEnd++;
-				fieldId++;
-			} else {
-				if (isEscaped && bytes[fieldByteEnd] == escapeChar
-						&& fieldByteEnd + 1 < structByteEnd) {
-					// ignore the char after escape_char
-					fieldByteEnd += 2;
-				} else {
-					fieldByteEnd++;
-				}
-			}
-		}
+        // Go through all bytes in the byte[]
+        while (fieldByteEnd <= structByteEnd && fieldId <= lastNecessaryFieldIndex) {
+            if (fieldByteEnd == structByteEnd || bytes[fieldByteEnd] == separator) {
+                // Reached the end of a field?
+                if (lastColumnTakesRest && fieldId == fieldEnds.length - 1) {
+                    fieldByteEnd = structByteEnd;
+                }
+                fieldEnds[fieldId] = fieldByteEnd;
+                if (fieldId == fieldEnds.length - 1 || fieldByteEnd == structByteEnd) {
+                    // for the case of null fields
+                    for (int i = fieldId; i < fieldEnds.length; i++) {
+                        fieldEnds[i] = fieldByteEnd;
+                    }
+                    break;
+                }
+                fieldByteEnd++;
+                fieldId++;
+            } else {
+                if (isEscaped && bytes[fieldByteEnd] == escapeChar && fieldByteEnd + 1 < structByteEnd) {
+                    // ignore the char after escape_char
+                    fieldByteEnd += 2;
+                } else {
+                    fieldByteEnd++;
+                }
+            }
+        }
 
-		for (int i = 0; i < invertedIndex.length; i++) {
-			int index = invertedIndex[i];
-			StructField fieldRef = fieldRefs.get(index);
-			ObjectInspector inspector = fieldRef.getFieldObjectInspector();
-			Category category = inspector.getCategory();
-			int fieldStart = index == 0 ? 0 : fieldEnds[index - 1] + 1;
-			int fieldEnd = fieldEnds[index];
-			if (bytes[fieldEnd] == separator)
-				fieldEnd--;
-			int fieldLen = fieldEnd - fieldStart + 1;
-			switch (category) {
-			case PRIMITIVE:
-				PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
-				switch (poi.getPrimitiveCategory()) {
-				case VOID: {
-					break;
-				}
-				case BOOLEAN: {
-					output.write(bytes[fieldStart]);
-					break;
-				}
-				case BYTE: {
-					output.write(bytes[fieldStart]);
-					break;
-				}
-				case SHORT: {
-					short v = LazyShort.parseShort(bytes, fieldStart, fieldLen);
-					output.write((byte) (v >> 8));
-					output.write((byte) (v));
-					break;
-				}
-				case INT: {
-					int v = LazyInteger.parseInt(bytes, fieldStart, fieldLen);
-					LazyUtils.writeVInt(output, v);
-					break;
-				}
-				case LONG: {
-					long v = LazyLong.parseLong(bytes, fieldStart, fieldLen);
-					LazyUtils.writeVLong(output, v);
-					break;
-				}
-				case FLOAT: {
-					float value = Float.parseFloat(Text.decode(bytes,
-							fieldStart, fieldLen));
-					int v = Float.floatToIntBits(value);
-					output.write((byte) (v >> 24));
-					output.write((byte) (v >> 16));
-					output.write((byte) (v >> 8));
-					output.write((byte) (v));
-					break;
-				}
-				case DOUBLE: {
-					try {
-						double value = Double.parseDouble(Text.decode(bytes,
-								fieldStart, fieldLen));
-						long v = Double.doubleToLongBits(value);
-						output.write((byte) (v >> 56));
-						output.write((byte) (v >> 48));
-						output.write((byte) (v >> 40));
-						output.write((byte) (v >> 32));
-						output.write((byte) (v >> 24));
-						output.write((byte) (v >> 16));
-						output.write((byte) (v >> 8));
-						output.write((byte) (v));
-					} catch (NumberFormatException e) {
-						throw e;
-					}
-					break;
-				}
-				case STRING: {
-					LazyUtils.writeVInt(output, fieldLen);
-					output.write(bytes, fieldStart, fieldLen);
-					break;
-				}
-				default: {
-					throw new RuntimeException("Unrecognized type: "
-							+ poi.getPrimitiveCategory());
-				}
-				}
-				break;
-			case STRUCT:
-				throw new NotImplementedException("Unrecognized type: struct ");
-			case LIST:
-				throw new NotImplementedException("Unrecognized type: struct ");
-			case MAP:
-				throw new NotImplementedException("Unrecognized type: struct ");
-			case UNION:
-				throw new NotImplementedException("Unrecognized type: struct ");
-			}
-			tb.addFieldEndOffset();
-		}
-	}
+        for (int i = 0; i < invertedIndex.length; i++) {
+            int index = invertedIndex[i];
+            StructField fieldRef = fieldRefs.get(index);
+            ObjectInspector inspector = fieldRef.getFieldObjectInspector();
+            Category category = inspector.getCategory();
+            int fieldStart = index == 0 ? 0 : fieldEnds[index - 1] + 1;
+            int fieldEnd = fieldEnds[index];
+            if (bytes[fieldEnd] == separator)
+                fieldEnd--;
+            int fieldLen = fieldEnd - fieldStart + 1;
+            switch (category) {
+                case PRIMITIVE:
+                    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
+                    switch (poi.getPrimitiveCategory()) {
+                        case VOID: {
+                            break;
+                        }
+                        case BOOLEAN: {
+                            output.write(bytes[fieldStart]);
+                            break;
+                        }
+                        case BYTE: {
+                            output.write(bytes[fieldStart]);
+                            break;
+                        }
+                        case SHORT: {
+                            short v = LazyShort.parseShort(bytes, fieldStart, fieldLen);
+                            output.write((byte) (v >> 8));
+                            output.write((byte) (v));
+                            break;
+                        }
+                        case INT: {
+                            int v = LazyInteger.parseInt(bytes, fieldStart, fieldLen);
+                            LazyUtils.writeVInt(output, v);
+                            break;
+                        }
+                        case LONG: {
+                            long v = LazyLong.parseLong(bytes, fieldStart, fieldLen);
+                            LazyUtils.writeVLong(output, v);
+                            break;
+                        }
+                        case FLOAT: {
+                            float value = Float.parseFloat(Text.decode(bytes, fieldStart, fieldLen));
+                            int v = Float.floatToIntBits(value);
+                            output.write((byte) (v >> 24));
+                            output.write((byte) (v >> 16));
+                            output.write((byte) (v >> 8));
+                            output.write((byte) (v));
+                            break;
+                        }
+                        case DOUBLE: {
+                            try {
+                                double value = Double.parseDouble(Text.decode(bytes, fieldStart, fieldLen));
+                                long v = Double.doubleToLongBits(value);
+                                output.write((byte) (v >> 56));
+                                output.write((byte) (v >> 48));
+                                output.write((byte) (v >> 40));
+                                output.write((byte) (v >> 32));
+                                output.write((byte) (v >> 24));
+                                output.write((byte) (v >> 16));
+                                output.write((byte) (v >> 8));
+                                output.write((byte) (v));
+                            } catch (NumberFormatException e) {
+                                throw e;
+                            }
+                            break;
+                        }
+                        case STRING: {
+                            LazyUtils.writeVInt(output, fieldLen);
+                            output.write(bytes, fieldStart, fieldLen);
+                            break;
+                        }
+                        default: {
+                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
+                        }
+                    }
+                    break;
+                case STRUCT:
+                    throw new NotImplementedException("Unrecognized type: struct");
+                case LIST:
+                    throw new NotImplementedException("Unrecognized type: list");
+                case MAP:
+                    throw new NotImplementedException("Unrecognized type: map");
+                case UNION:
+                    throw new NotImplementedException("Unrecognized type: union");
+            }
+            tb.addFieldEndOffset();
+        }
+    }
 }
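One detail worth calling out in the parser above: the SHORT, FLOAT, and DOUBLE branches emit multi-byte values with manual shifts, which is byte-for-byte the big-endian layout that java.io.DataOutputStream produces. A small self-contained check of that equivalence (this class is illustrative, not part of the patch):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.Arrays;

    public final class BigEndianSketch {
        public static void main(String[] args) throws IOException {
            int v = Float.floatToIntBits(3.14f);

            // Shift-and-write, as in the FLOAT case of the parser.
            ByteArrayOutputStream manualBytes = new ByteArrayOutputStream();
            DataOutputStream manual = new DataOutputStream(manualBytes);
            manual.write((byte) (v >> 24));
            manual.write((byte) (v >> 16));
            manual.write((byte) (v >> 8));
            manual.write((byte) (v));

            // Library equivalent: writeInt is specified as big-endian.
            ByteArrayOutputStream builtinBytes = new ByteArrayOutputStream();
            new DataOutputStream(builtinBytes).writeInt(v);

            System.out.println(Arrays.equals(manualBytes.toByteArray(),
                    builtinBytes.toByteArray())); // true
        }
    }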
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java
index 57e2cc0..a385742 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java
@@ -116,1326 +116,1195 @@
 @SuppressWarnings({ "deprecation", "unused" })
 public class Driver implements CommandProcessor {
 
-	static final private Log LOG = LogFactory.getLog(Driver.class.getName());
-	static final private LogHelper console = new LogHelper(LOG);
+    static final private Log LOG = LogFactory.getLog(Driver.class.getName());
+    static final private LogHelper console = new LogHelper(LOG);
 
-	// hive-sterix
-	private IExecutionEngine engine;
-	private boolean hivesterix = false;
+    // hive-sterix
+    private IExecutionEngine engine;
+    private boolean hivesterix = false;
 
-	private int maxRows = 100;
-	ByteStream.Output bos = new ByteStream.Output();
+    private int maxRows = 100;
+    ByteStream.Output bos = new ByteStream.Output();
 
-	private HiveConf conf;
-	private DataInput resStream;
-	private Context ctx;
-	private QueryPlan plan;
-	private Schema schema;
-	private HiveLockManager hiveLockMgr;
+    private HiveConf conf;
+    private DataInput resStream;
+    private Context ctx;
+    private QueryPlan plan;
+    private Schema schema;
+    private HiveLockManager hiveLockMgr;
 
-	private String errorMessage;
-	private String SQLState;
+    private String errorMessage;
+    private String SQLState;
 
-	// A limit on the number of threads that can be launched
-	private int maxthreads;
-	private final int sleeptime = 2000;
+    // A limit on the number of threads that can be launched
+    private int maxthreads;
+    private final int sleeptime = 2000;
 
-	protected int tryCount = Integer.MAX_VALUE;
+    protected int tryCount = Integer.MAX_VALUE;
 
-	private int checkLockManager() {
-		boolean supportConcurrency = conf
-				.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
-		if (supportConcurrency && (hiveLockMgr == null)) {
-			try {
-				setLockManager();
-			} catch (SemanticException e) {
-				errorMessage = "FAILED: Error in semantic analysis: "
-						+ e.getMessage();
-				SQLState = ErrorMsg.findSQLState(e.getMessage());
-				console.printError(
-						errorMessage,
-						"\n"
-								+ org.apache.hadoop.util.StringUtils
-										.stringifyException(e));
-				return (12);
-			}
-		}
-		return (0);
-	}
+    private int checkLockManager() {
+        boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+        if (supportConcurrency && (hiveLockMgr == null)) {
+            try {
+                setLockManager();
+            } catch (SemanticException e) {
+                errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+                SQLState = ErrorMsg.findSQLState(e.getMessage());
+                console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+                return (12);
+            }
+        }
+        return (0);
+    }
 
-	private void setLockManager() throws SemanticException {
-		boolean supportConcurrency = conf
-				.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
-		if (supportConcurrency) {
-			String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
-			if ((lockMgr == null) || (lockMgr.isEmpty())) {
-				throw new SemanticException(
-						ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
-			}
+    private void setLockManager() throws SemanticException {
+        boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+        if (supportConcurrency) {
+            String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
+            if ((lockMgr == null) || (lockMgr.isEmpty())) {
+                throw new SemanticException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
+            }
 
-			try {
-				hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(
-						conf.getClassByName(lockMgr), conf);
-				hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
-			} catch (Exception e) {
-				throw new SemanticException(
-						ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg()
-								+ e.getMessage());
-			}
-		}
-	}
+            try {
+                hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(conf.getClassByName(lockMgr), conf);
+                hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
+            } catch (Exception e) {
+                throw new SemanticException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() + e.getMessage());
+            }
+        }
+    }
 
-	public void init() {
-		Operator.resetId();
-	}
+    public void init() {
+        Operator.resetId();
+    }
 
-	/**
-	 * Return the status information about the Map-Reduce cluster
-	 */
-	public ClusterStatus getClusterStatus() throws Exception {
-		ClusterStatus cs;
-		try {
-			JobConf job = new JobConf(conf, ExecDriver.class);
-			JobClient jc = new JobClient(job);
-			cs = jc.getClusterStatus();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw e;
-		}
-		LOG.info("Returning cluster status: " + cs.toString());
-		return cs;
-	}
+    /**
+     * Return the status information about the Map-Reduce cluster
+     */
+    public ClusterStatus getClusterStatus() throws Exception {
+        ClusterStatus cs;
+        try {
+            JobConf job = new JobConf(conf, ExecDriver.class);
+            JobClient jc = new JobClient(job);
+            cs = jc.getClusterStatus();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e;
+        }
+        LOG.info("Returning cluster status: " + cs.toString());
+        return cs;
+    }
 
-	public Schema getSchema() {
-		return schema;
-	}
+    public Schema getSchema() {
+        return schema;
+    }
 
-	/**
-	 * Get a Schema with fields represented with native Hive types
-	 */
-	public static Schema getSchema(BaseSemanticAnalyzer sem, HiveConf conf) {
-		Schema schema = null;
+    /**
+     * Get a Schema with fields represented with native Hive types
+     */
+    public static Schema getSchema(BaseSemanticAnalyzer sem, HiveConf conf) {
+        Schema schema = null;
 
-		// If we have a plan, prefer its logical result schema if it's
-		// available; otherwise, try digging out a fetch task; failing that,
-		// give up.
-		if (sem == null) {
-			// can't get any info without a plan
-		} else if (sem.getResultSchema() != null) {
-			List<FieldSchema> lst = sem.getResultSchema();
-			schema = new Schema(lst, null);
-		} else if (sem.getFetchTask() != null) {
-			FetchTask ft = sem.getFetchTask();
-			TableDesc td = ft.getTblDesc();
-			// partitioned tables don't have tableDesc set on the FetchTask.
-			// Instead
-			// they have a list of PartitionDesc objects, each with a table
-			// desc.
-			// Let's
-			// try to fetch the desc for the first partition and use it's
-			// deserializer.
-			if (td == null && ft.getWork() != null
-					&& ft.getWork().getPartDesc() != null) {
-				if (ft.getWork().getPartDesc().size() > 0) {
-					td = ft.getWork().getPartDesc().get(0).getTableDesc();
-				}
-			}
+        // If we have a plan, prefer its logical result schema if it's
+        // available; otherwise, try digging out a fetch task; failing that,
+        // give up.
+        if (sem == null) {
+            // can't get any info without a plan
+        } else if (sem.getResultSchema() != null) {
+            List<FieldSchema> lst = sem.getResultSchema();
+            schema = new Schema(lst, null);
+        } else if (sem.getFetchTask() != null) {
+            FetchTask ft = sem.getFetchTask();
+            TableDesc td = ft.getTblDesc();
+            // Partitioned tables don't have a tableDesc set on the FetchTask.
+            // Instead they have a list of PartitionDesc objects, each with a
+            // table desc. Let's try to fetch the desc for the first partition
+            // and use its deserializer.
+            if (td == null && ft.getWork() != null && ft.getWork().getPartDesc() != null) {
+                if (ft.getWork().getPartDesc().size() > 0) {
+                    td = ft.getWork().getPartDesc().get(0).getTableDesc();
+                }
+            }
 
-			if (td == null) {
-				LOG.info("No returning schema.");
-			} else {
-				String tableName = "result";
-				List<FieldSchema> lst = null;
-				try {
-					lst = MetaStoreUtils.getFieldsFromDeserializer(tableName,
-							td.getDeserializer());
-				} catch (Exception e) {
-					LOG.warn("Error getting schema: "
-							+ org.apache.hadoop.util.StringUtils
-									.stringifyException(e));
-				}
-				if (lst != null) {
-					schema = new Schema(lst, null);
-				}
-			}
-		}
-		if (schema == null) {
-			schema = new Schema();
-		}
-		LOG.info("Returning Hive schema: " + schema);
-		return schema;
-	}
+            if (td == null) {
+                LOG.info("No returning schema.");
+            } else {
+                String tableName = "result";
+                List<FieldSchema> lst = null;
+                try {
+                    lst = MetaStoreUtils.getFieldsFromDeserializer(tableName, td.getDeserializer());
+                } catch (Exception e) {
+                    LOG.warn("Error getting schema: " + org.apache.hadoop.util.StringUtils.stringifyException(e));
+                }
+                if (lst != null) {
+                    schema = new Schema(lst, null);
+                }
+            }
+        }
+        if (schema == null) {
+            schema = new Schema();
+        }
+        LOG.info("Returning Hive schema: " + schema);
+        return schema;
+    }
 
-	/**
-	 * Get a Schema with fields represented with Thrift DDL types
-	 */
-	public Schema getThriftSchema() throws Exception {
-		Schema schema;
-		try {
-			schema = getSchema();
-			if (schema != null) {
-				List<FieldSchema> lst = schema.getFieldSchemas();
-				// Go over the schema and convert type to thrift type
-				if (lst != null) {
-					for (FieldSchema f : lst) {
-						f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
-					}
-				}
-			}
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw e;
-		}
-		LOG.info("Returning Thrift schema: " + schema);
-		return schema;
-	}
+    /**
+     * Get a Schema with fields represented with Thrift DDL types
+     */
+    public Schema getThriftSchema() throws Exception {
+        Schema schema;
+        try {
+            schema = getSchema();
+            if (schema != null) {
+                List<FieldSchema> lst = schema.getFieldSchemas();
+                // Go over the schema and convert type to thrift type
+                if (lst != null) {
+                    for (FieldSchema f : lst) {
+                        f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
+                    }
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e;
+        }
+        LOG.info("Returning Thrift schema: " + schema);
+        return schema;
+    }
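+
+    // Example (sketch): typeToThriftType maps native Hive types to Thrift DDL
+    // types, so a schema of (name string, age int, salary double) would come
+    // back roughly as (name string, age i32, salary double); the exact mapping
+    // is defined by MetaStoreUtils.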
 
-	/**
-	 * Return the maximum number of rows returned by getResults
-	 */
-	public int getMaxRows() {
-		return maxRows;
-	}
+    /**
+     * Return the maximum number of rows returned by getResults
+     */
+    public int getMaxRows() {
+        return maxRows;
+    }
 
-	/**
-	 * Set the maximum number of rows returned by getResults
-	 */
-	public void setMaxRows(int maxRows) {
-		this.maxRows = maxRows;
-	}
+    /**
+     * Set the maximum number of rows returned by getResults
+     */
+    public void setMaxRows(int maxRows) {
+        this.maxRows = maxRows;
+    }
 
-	public boolean hasReduceTasks(List<Task<? extends Serializable>> tasks) {
-		if (tasks == null) {
-			return false;
-		}
+    public boolean hasReduceTasks(List<Task<? extends Serializable>> tasks) {
+        if (tasks == null) {
+            return false;
+        }
 
-		boolean hasReduce = false;
-		for (Task<? extends Serializable> task : tasks) {
-			if (task.hasReduce()) {
-				return true;
-			}
+        boolean hasReduce = false;
+        for (Task<? extends Serializable> task : tasks) {
+            if (task.hasReduce()) {
+                return true;
+            }
 
-			hasReduce = (hasReduce || hasReduceTasks(task.getChildTasks()));
-		}
-		return hasReduce;
-	}
+            hasReduce = (hasReduce || hasReduceTasks(task.getChildTasks()));
+        }
+        return hasReduce;
+    }
 
-	/**
-	 * for backwards compatibility with current tests
-	 */
-	public Driver(HiveConf conf) {
-		this.conf = conf;
+    /**
+     * for backwards compatibility with current tests
+     */
+    public Driver(HiveConf conf) {
+        this.conf = conf;
 
-		// hivesterix
-		engine = new HyracksExecutionEngine(conf);
-	}
+        // hivesterix
+        engine = new HyracksExecutionEngine(conf);
+    }
 
-	public Driver() {
-		if (SessionState.get() != null) {
-			conf = SessionState.get().getConf();
-		}
+    public Driver() {
+        if (SessionState.get() != null) {
+            conf = SessionState.get().getConf();
+        }
 
-		// hivesterix
-		engine = new HyracksExecutionEngine(conf);
-	}
+        // hivesterix
+        engine = new HyracksExecutionEngine(conf);
+    }
 
-	// hivesterix: plan printer
-	public Driver(HiveConf conf, PrintWriter planPrinter) {
-		this.conf = conf;
-		engine = new HyracksExecutionEngine(conf, planPrinter);
-	}
+    // hivesterix: plan printer
+    public Driver(HiveConf conf, PrintWriter planPrinter) {
+        this.conf = conf;
+        engine = new HyracksExecutionEngine(conf, planPrinter);
+    }
 
-	public void clear() {
-		this.hivesterix = false;
-	}
+    public void clear() {
+        this.hivesterix = false;
+    }
 
-	/**
-	 * Compile a new query. Any currently-planned query associated with this
-	 * Driver is discarded.
-	 * 
-	 * @param command
-	 *            The SQL query to compile.
-	 */
-	public int compile(String command) {
-		if (plan != null) {
-			close();
-			plan = null;
-		}
+    /**
+     * Compile a new query. Any currently-planned query associated with this
+     * Driver is discarded.
+     * 
+     * @param command
+     *            The SQL query to compile.
+     */
+    public int compile(String command) {
+        if (plan != null) {
+            close();
+            plan = null;
+        }
 
-		TaskFactory.resetId();
+        TaskFactory.resetId();
 
-		try {
-			command = new VariableSubstitution().substitute(conf, command);
-			ctx = new Context(conf);
+        try {
+            command = new VariableSubstitution().substitute(conf, command);
+            ctx = new Context(conf);
 
-			ParseDriver pd = new ParseDriver();
-			ASTNode tree = pd.parse(command, ctx);
-			tree = ParseUtils.findRootNonNullToken(tree);
+            ParseDriver pd = new ParseDriver();
+            ASTNode tree = pd.parse(command, ctx);
+            tree = ParseUtils.findRootNonNullToken(tree);
 
-			BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
-			List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
+            BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+            List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
 
-			// Do semantic analysis and plan generation
-			if (saHooks != null) {
-				HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
-				hookCtx.setConf(conf);
-				for (AbstractSemanticAnalyzerHook hook : saHooks) {
-					tree = hook.preAnalyze(hookCtx, tree);
-				}
-				sem.analyze(tree, ctx);
-				for (AbstractSemanticAnalyzerHook hook : saHooks) {
-					hook.postAnalyze(hookCtx, sem.getRootTasks());
-				}
-			} else {
-				sem.analyze(tree, ctx);
-			}
+            // Do semantic analysis and plan generation
+            if (saHooks != null) {
+                HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
+                hookCtx.setConf(conf);
+                for (AbstractSemanticAnalyzerHook hook : saHooks) {
+                    tree = hook.preAnalyze(hookCtx, tree);
+                }
+                sem.analyze(tree, ctx);
+                for (AbstractSemanticAnalyzerHook hook : saHooks) {
+                    hook.postAnalyze(hookCtx, sem.getRootTasks());
+                }
+            } else {
+                sem.analyze(tree, ctx);
+            }
 
-			LOG.info("Semantic Analysis Completed");
+            LOG.info("Semantic Analysis Completed");
 
-			// validate the plan
-			sem.validate();
+            // validate the plan
+            sem.validate();
 
-			plan = new QueryPlan(command, sem);
-			// initialize FetchTask right here
-			if (plan.getFetchTask() != null) {
-				plan.getFetchTask().initialize(conf, plan, null);
-			}
+            plan = new QueryPlan(command, sem);
+            // initialize FetchTask right here
+            if (plan.getFetchTask() != null) {
+                plan.getFetchTask().initialize(conf, plan, null);
+            }
 
-			// get the output schema
-			schema = getSchema(sem, conf);
+            // get the output schema
+            schema = getSchema(sem, conf);
 
-			// test Only - serialize the query plan and deserialize it
-			if (sem instanceof SemanticAnalyzer
-					&& command.toLowerCase().indexOf("create") < 0) {
+            // test only: serialize the query plan and deserialize it
+            if (sem instanceof SemanticAnalyzer && command.toLowerCase().indexOf("create") < 0) {
 
-				Thread.currentThread().setContextClassLoader(
-						this.getClass().getClassLoader());
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
 
-				String queryPlanFileName = ctx.getLocalScratchDir(true)
-						+ Path.SEPARATOR_CHAR + "queryplan.xml";
-				LOG.info("query plan = " + queryPlanFileName);
-				queryPlanFileName = new Path(queryPlanFileName).toUri()
-						.getPath();
+                String queryPlanFileName = ctx.getLocalScratchDir(true) + Path.SEPARATOR_CHAR + "queryplan.xml";
+                LOG.info("query plan = " + queryPlanFileName);
+                queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();
 
-				// serialize the queryPlan
-				FileOutputStream fos = new FileOutputStream(queryPlanFileName);
-				Utilities.serializeQueryPlan(plan, fos);
-				fos.close();
+                // serialize the queryPlan
+                FileOutputStream fos = new FileOutputStream(queryPlanFileName);
+                Utilities.serializeQueryPlan(plan, fos);
+                fos.close();
 
-				// deserialize the queryPlan
-				FileInputStream fis = new FileInputStream(queryPlanFileName);
-				QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
-				fis.close();
+                // deserialize the queryPlan
+                FileInputStream fis = new FileInputStream(queryPlanFileName);
+                QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
+                fis.close();
 
-				// Use the deserialized plan
-				plan = newPlan;
-			}
+                // Use the deserialized plan
+                plan = newPlan;
+            }
 
-			// initialize FetchTask right here
-			if (plan.getFetchTask() != null) {
-				plan.getFetchTask().initialize(conf, plan, null);
-			}
+            // initialize FetchTask right here
+            if (plan.getFetchTask() != null) {
+                plan.getFetchTask().initialize(conf, plan, null);
+            }
 
-			// do the authorization check
-			if (HiveConf.getBoolVar(conf,
-					HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
-				try {
-					// doAuthorization(sem);
-				} catch (AuthorizationException authExp) {
-					console.printError("Authorization failed:"
-							+ authExp.getMessage()
-							+ ". Use show grant to get more details.");
-					return 403;
-				}
-			}
+            // do the authorization check
+            if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+                try {
+                    // doAuthorization(sem);
+                } catch (AuthorizationException authExp) {
+                    console.printError("Authorization failed:" + authExp.getMessage()
+                            + ". Use show grant to get more details.");
+                    return 403;
+                }
+            }
 
-			// hyracks run
-			if (sem instanceof SemanticAnalyzer
-					&& command.toLowerCase().indexOf("create") < 0) {
-				hivesterix = true;
-				return engine.compileJob(sem.getRootTasks());
-			}
+            // hyracks run
+            if (sem instanceof SemanticAnalyzer && command.toLowerCase().indexOf("create") < 0) {
+                hivesterix = true;
+                return engine.compileJob(sem.getRootTasks());
+            }
 
-			return 0;
-		} catch (SemanticException e) {
-			errorMessage = "FAILED: Error in semantic analysis: "
-					+ e.getMessage();
-			SQLState = ErrorMsg.findSQLState(e.getMessage());
-			console.printError(errorMessage, "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (10);
-		} catch (ParseException e) {
-			errorMessage = "FAILED: Parse Error: " + e.getMessage();
-			SQLState = ErrorMsg.findSQLState(e.getMessage());
-			console.printError(errorMessage, "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (11);
-		} catch (Exception e) {
-			errorMessage = "FAILED: Hive Internal Error: "
-					+ Utilities.getNameMessage(e);
-			SQLState = ErrorMsg.findSQLState(e.getMessage());
-			console.printError(errorMessage + "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (12);
-		}
-	}
+            return 0;
+        } catch (SemanticException e) {
+            errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        } catch (ParseException e) {
+            errorMessage = "FAILED: Parse Error: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (11);
+        } catch (Exception e) {
+            errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (12);
+        }
+    }
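+
+    // Return codes used by compile() above: 0 on success (or the result of
+    // engine.compileJob(...) when the query is routed to hivesterix), 403 for
+    // authorization failures, 10 for semantic errors, 11 for parse errors,
+    // and 12 for internal errors.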
 
-	private void doAuthorization(BaseSemanticAnalyzer sem)
-			throws HiveException, AuthorizationException {
-		HashSet<ReadEntity> inputs = sem.getInputs();
-		HashSet<WriteEntity> outputs = sem.getOutputs();
-		SessionState ss = SessionState.get();
-		HiveOperation op = ss.getHiveOperation();
-		Hive db = sem.getDb();
-		if (op != null) {
-			if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
-					|| op.equals(HiveOperation.CREATETABLE)) {
-				ss.getAuthorizer().authorize(
-						db.getDatabase(db.getCurrentDatabase()),
-						null,
-						HiveOperation.CREATETABLE_AS_SELECT
-								.getOutputRequiredPrivileges());
-			} else {
-				// if (op.equals(HiveOperation.IMPORT)) {
-				// ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
-				// if (!isa.existsTable()) {
-				ss.getAuthorizer().authorize(
-						db.getDatabase(db.getCurrentDatabase()),
-						null,
-						HiveOperation.CREATETABLE_AS_SELECT
-								.getOutputRequiredPrivileges());
-				// }
-				// }
-			}
-			if (outputs != null && outputs.size() > 0) {
-				for (WriteEntity write : outputs) {
+    private void doAuthorization(BaseSemanticAnalyzer sem) throws HiveException, AuthorizationException {
+        HashSet<ReadEntity> inputs = sem.getInputs();
+        HashSet<WriteEntity> outputs = sem.getOutputs();
+        SessionState ss = SessionState.get();
+        HiveOperation op = ss.getHiveOperation();
+        Hive db = sem.getDb();
+        if (op != null) {
+            if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
+                ss.getAuthorizer().authorize(db.getDatabase(db.getCurrentDatabase()), null,
+                        HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+            } else {
+                // if (op.equals(HiveOperation.IMPORT)) {
+                // ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
+                // if (!isa.existsTable()) {
+                ss.getAuthorizer().authorize(db.getDatabase(db.getCurrentDatabase()), null,
+                        HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+                // }
+                // }
+            }
+            if (outputs != null && outputs.size() > 0) {
+                for (WriteEntity write : outputs) {
 
-					if (write.getType() == WriteEntity.Type.PARTITION) {
-						Partition part = db.getPartition(write.getTable(),
-								write.getPartition().getSpec(), false);
-						if (part != null) {
-							ss.getAuthorizer().authorize(write.getPartition(),
-									null, op.getOutputRequiredPrivileges());
-							continue;
-						}
-					}
+                    if (write.getType() == WriteEntity.Type.PARTITION) {
+                        Partition part = db.getPartition(write.getTable(), write.getPartition().getSpec(), false);
+                        if (part != null) {
+                            ss.getAuthorizer().authorize(write.getPartition(), null, op.getOutputRequiredPrivileges());
+                            continue;
+                        }
+                    }
 
-					if (write.getTable() != null) {
-						ss.getAuthorizer().authorize(write.getTable(), null,
-								op.getOutputRequiredPrivileges());
-					}
-				}
+                    if (write.getTable() != null) {
+                        ss.getAuthorizer().authorize(write.getTable(), null, op.getOutputRequiredPrivileges());
+                    }
+                }
 
-			}
-		}
+            }
+        }
 
-		if (inputs != null && inputs.size() > 0) {
+        if (inputs != null && inputs.size() > 0) {
 
-			Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
-			Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
+            Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
+            Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
 
-			Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
-			for (ReadEntity read : inputs) {
-				if (read.getPartition() != null) {
-					Table tbl = read.getTable();
-					String tblName = tbl.getTableName();
-					if (tableUsePartLevelAuth.get(tblName) == null) {
-						boolean usePartLevelPriv = (tbl.getParameters().get(
-								"PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
-								.equalsIgnoreCase(tbl.getParameters().get(
-										"PARTITION_LEVEL_PRIVILEGE"))));
-						if (usePartLevelPriv) {
-							tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
-						} else {
-							tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
-						}
-					}
-				}
-			}
+            Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
+            for (ReadEntity read : inputs) {
+                if (read.getPartition() != null) {
+                    Table tbl = read.getTable();
+                    String tblName = tbl.getTableName();
+                    if (tableUsePartLevelAuth.get(tblName) == null) {
+                        boolean usePartLevelPriv = (tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+                                .equalsIgnoreCase(tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))));
+                        if (usePartLevelPriv) {
+                            tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
+                        } else {
+                            tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
+                        }
+                    }
+                }
+            }
 
-			if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
-					|| op.equals(HiveOperation.QUERY)) {
-				SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
-				ParseContext parseCtx = querySem.getParseContext();
-				Map<TableScanOperator, Table> tsoTopMap = parseCtx
-						.getTopToTable();
+            if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.QUERY)) {
+                SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
+                ParseContext parseCtx = querySem.getParseContext();
+                Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();
 
-				for (Map.Entry<String, Operator<? extends Serializable>> topOpMap : querySem
-						.getParseContext().getTopOps().entrySet()) {
-					Operator<? extends Serializable> topOp = topOpMap
-							.getValue();
-					if (topOp instanceof TableScanOperator
-							&& tsoTopMap.containsKey(topOp)) {
-						TableScanOperator tableScanOp = (TableScanOperator) topOp;
-						Table tbl = tsoTopMap.get(tableScanOp);
-						List<Integer> neededColumnIds = tableScanOp
-								.getNeededColumnIDs();
-						List<FieldSchema> columns = tbl.getCols();
-						List<String> cols = new ArrayList<String>();
-						if (neededColumnIds != null
-								&& neededColumnIds.size() > 0) {
-							for (int i = 0; i < neededColumnIds.size(); i++) {
-								cols.add(columns.get(neededColumnIds.get(i))
-										.getName());
-							}
-						} else {
-							for (int i = 0; i < columns.size(); i++) {
-								cols.add(columns.get(i).getName());
-							}
-						}
-						if (tbl.isPartitioned()
-								&& tableUsePartLevelAuth
-										.get(tbl.getTableName())) {
-							String alias_id = topOpMap.getKey();
-							PrunedPartitionList partsList = PartitionPruner
-									.prune(parseCtx.getTopToTable().get(topOp),
-											parseCtx.getOpToPartPruner().get(
-													topOp), parseCtx.getConf(),
-											alias_id,
-											parseCtx.getPrunedPartitions());
-							Set<Partition> parts = new HashSet<Partition>();
-							parts.addAll(partsList.getConfirmedPartns());
-							parts.addAll(partsList.getUnknownPartns());
-							for (Partition part : parts) {
-								List<String> existingCols = part2Cols.get(part);
-								if (existingCols == null) {
-									existingCols = new ArrayList<String>();
-								}
-								existingCols.addAll(cols);
-								part2Cols.put(part, existingCols);
-							}
-						} else {
-							List<String> existingCols = tab2Cols.get(tbl);
-							if (existingCols == null) {
-								existingCols = new ArrayList<String>();
-							}
-							existingCols.addAll(cols);
-							tab2Cols.put(tbl, existingCols);
-						}
-					}
-				}
-			}
+                for (Map.Entry<String, Operator<? extends Serializable>> topOpMap : querySem.getParseContext()
+                        .getTopOps().entrySet()) {
+                    Operator<? extends Serializable> topOp = topOpMap.getValue();
+                    if (topOp instanceof TableScanOperator && tsoTopMap.containsKey(topOp)) {
+                        TableScanOperator tableScanOp = (TableScanOperator) topOp;
+                        Table tbl = tsoTopMap.get(tableScanOp);
+                        List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
+                        List<FieldSchema> columns = tbl.getCols();
+                        List<String> cols = new ArrayList<String>();
+                        if (neededColumnIds != null && neededColumnIds.size() > 0) {
+                            for (int i = 0; i < neededColumnIds.size(); i++) {
+                                cols.add(columns.get(neededColumnIds.get(i)).getName());
+                            }
+                        } else {
+                            for (int i = 0; i < columns.size(); i++) {
+                                cols.add(columns.get(i).getName());
+                            }
+                        }
+                        if (tbl.isPartitioned() && tableUsePartLevelAuth.get(tbl.getTableName())) {
+                            String alias_id = topOpMap.getKey();
+                            PrunedPartitionList partsList = PartitionPruner.prune(parseCtx.getTopToTable().get(topOp),
+                                    parseCtx.getOpToPartPruner().get(topOp), parseCtx.getConf(), alias_id,
+                                    parseCtx.getPrunedPartitions());
+                            Set<Partition> parts = new HashSet<Partition>();
+                            parts.addAll(partsList.getConfirmedPartns());
+                            parts.addAll(partsList.getUnknownPartns());
+                            for (Partition part : parts) {
+                                List<String> existingCols = part2Cols.get(part);
+                                if (existingCols == null) {
+                                    existingCols = new ArrayList<String>();
+                                }
+                                existingCols.addAll(cols);
+                                part2Cols.put(part, existingCols);
+                            }
+                        } else {
+                            List<String> existingCols = tab2Cols.get(tbl);
+                            if (existingCols == null) {
+                                existingCols = new ArrayList<String>();
+                            }
+                            existingCols.addAll(cols);
+                            tab2Cols.put(tbl, existingCols);
+                        }
+                    }
+                }
+            }
 
-			// cache the results for table authorization
-			Set<String> tableAuthChecked = new HashSet<String>();
-			for (ReadEntity read : inputs) {
-				Table tbl = null;
-				if (read.getPartition() != null) {
-					tbl = read.getPartition().getTable();
-					// use partition level authorization
-					if (tableUsePartLevelAuth.get(tbl.getTableName())) {
-						List<String> cols = part2Cols.get(read.getPartition());
-						if (cols != null && cols.size() > 0) {
-							ss.getAuthorizer().authorize(
-									read.getPartition().getTable(),
-									read.getPartition(), cols,
-									op.getInputRequiredPrivileges(), null);
-						} else {
-							ss.getAuthorizer().authorize(read.getPartition(),
-									op.getInputRequiredPrivileges(), null);
-						}
-						continue;
-					}
-				} else if (read.getTable() != null) {
-					tbl = read.getTable();
-				}
+            // cache the results for table authorization
+            Set<String> tableAuthChecked = new HashSet<String>();
+            for (ReadEntity read : inputs) {
+                Table tbl = null;
+                if (read.getPartition() != null) {
+                    tbl = read.getPartition().getTable();
+                    // use partition level authorization
+                    if (tableUsePartLevelAuth.get(tbl.getTableName())) {
+                        List<String> cols = part2Cols.get(read.getPartition());
+                        if (cols != null && cols.size() > 0) {
+                            ss.getAuthorizer().authorize(read.getPartition().getTable(), read.getPartition(), cols,
+                                    op.getInputRequiredPrivileges(), null);
+                        } else {
+                            ss.getAuthorizer().authorize(read.getPartition(), op.getInputRequiredPrivileges(), null);
+                        }
+                        continue;
+                    }
+                } else if (read.getTable() != null) {
+                    tbl = read.getTable();
+                }
 
-				// if we reach here, it means it needs to do a table
-				// authorization
-				// check, and the table authorization may already happened
-				// because of other
-				// partitions
-				if (tbl != null
-						&& !tableAuthChecked.contains(tbl.getTableName())) {
-					List<String> cols = tab2Cols.get(tbl);
-					if (cols != null && cols.size() > 0) {
-						ss.getAuthorizer().authorize(tbl, null, cols,
-								op.getInputRequiredPrivileges(), null);
-					} else {
-						ss.getAuthorizer().authorize(tbl,
-								op.getInputRequiredPrivileges(), null);
-					}
-					tableAuthChecked.add(tbl.getTableName());
-				}
-			}
+                // if we reach here, a table authorization check is needed, and
+                // that check may have already happened because of other
+                // partitions
+                if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())) {
+                    List<String> cols = tab2Cols.get(tbl);
+                    if (cols != null && cols.size() > 0) {
+                        ss.getAuthorizer().authorize(tbl, null, cols, op.getInputRequiredPrivileges(), null);
+                    } else {
+                        ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(), null);
+                    }
+                    tableAuthChecked.add(tbl.getTableName());
+                }
+            }
 
-		}
-	}
+        }
+    }
 
-	/**
-	 * @return The current query plan associated with this Driver, if any.
-	 */
-	public QueryPlan getPlan() {
-		return plan;
-	}
+    /**
+     * @return The current query plan associated with this Driver, if any.
+     */
+    public QueryPlan getPlan() {
+        return plan;
+    }
 
-	/**
-	 * @param t
-	 *            The table to be locked
-	 * @param p
-	 *            The partition to be locked
-	 * @param mode
-	 *            The mode of the lock (SHARED/EXCLUSIVE) Get the list of
-	 *            objects to be locked. If a partition needs to be locked (in
-	 *            any mode), all its parents should also be locked in SHARED
-	 *            mode.
-	 **/
-	private List<HiveLockObj> getLockObjects(Table t, Partition p,
-			HiveLockMode mode) throws SemanticException {
-		List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
+    /**
+     * Get the list of objects to be locked. If a partition needs to be locked
+     * (in any mode), all its parents should also be locked in SHARED mode.
+     * 
+     * @param t
+     *            The table to be locked
+     * @param p
+     *            The partition to be locked
+     * @param mode
+     *            The mode of the lock (SHARED/EXCLUSIVE)
+     **/
+    private List<HiveLockObj> getLockObjects(Table t, Partition p, HiveLockMode mode) throws SemanticException {
+        List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
 
-		HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(),
-				String.valueOf(System.currentTimeMillis()), "IMPLICIT");
+        HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(), String.valueOf(System
+                .currentTimeMillis()), "IMPLICIT");
 
-		if (t != null) {
-			locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
-			mode = HiveLockMode.SHARED;
-			locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(),
-					lockData), mode));
-			return locks;
-		}
+        if (t != null) {
+            locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
+            mode = HiveLockMode.SHARED;
+            locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
+            return locks;
+        }
 
-		if (p != null) {
-			if (!(p instanceof DummyPartition)) {
-				locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
-			}
+        if (p != null) {
+            if (!(p instanceof DummyPartition)) {
+                locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
+            }
 
-			// All the parents are locked in shared mode
-			mode = HiveLockMode.SHARED;
+            // All the parents are locked in shared mode
+            mode = HiveLockMode.SHARED;
 
-			// For dummy partitions, only partition name is needed
-			String name = p.getName();
+            // For dummy partitions, only partition name is needed
+            String name = p.getName();
 
-			if (p instanceof DummyPartition) {
-				name = p.getName().split("@")[2];
-			}
+            if (p instanceof DummyPartition) {
+                name = p.getName().split("@")[2];
+            }
 
-			String partName = name;
-			String partialName = "";
-			String[] partns = name.split("/");
-			int len = p instanceof DummyPartition ? partns.length
-					: partns.length - 1;
-			for (int idx = 0; idx < len; idx++) {
-				String partn = partns[idx];
-				partialName += partn;
-				try {
-					locks.add(new HiveLockObj(new HiveLockObject(
-							new DummyPartition(p.getTable(), p.getTable()
-									.getDbName()
-									+ "/"
-									+ p.getTable().getTableName()
-									+ "/"
-									+ partialName), lockData), mode));
-					partialName += "/";
-				} catch (HiveException e) {
-					throw new SemanticException(e.getMessage());
-				}
-			}
+            String partName = name;
+            String partialName = "";
+            String[] partns = name.split("/");
+            int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
+            for (int idx = 0; idx < len; idx++) {
+                String partn = partns[idx];
+                partialName += partn;
+                try {
+                    locks.add(new HiveLockObj(new HiveLockObject(new DummyPartition(p.getTable(), p.getTable()
+                            .getDbName() + "/" + p.getTable().getTableName() + "/" + partialName), lockData), mode));
+                    partialName += "/";
+                } catch (HiveException e) {
+                    throw new SemanticException(e.getMessage());
+                }
+            }
 
-			locks.add(new HiveLockObj(
-					new HiveLockObject(p.getTable(), lockData), mode));
-			locks.add(new HiveLockObj(new HiveLockObject(p.getTable()
-					.getDbName(), lockData), mode));
-		}
-		return locks;
-	}
+            locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
+            locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
+        }
+        return locks;
+    }
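+
+    // Worked example (sketch): locking partition "ds=1/hr=2" of table
+    // db1.tbl1 in EXCLUSIVE mode yields the partition itself in EXCLUSIVE
+    // mode, plus SHARED locks on its parents: the dummy partition
+    // "db1/tbl1/ds=1", the table tbl1, and the database db1.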
 
-	/**
-	 * Acquire read and write locks needed by the statement. The list of objects
-	 * to be locked are obtained from he inputs and outputs populated by the
-	 * compiler. The lock acuisition scheme is pretty simple. If all the locks
-	 * cannot be obtained, error out. Deadlock is avoided by making sure that
-	 * the locks are lexicographically sorted.
-	 **/
-	public int acquireReadWriteLocks() {
-		try {
-			int sleepTime = conf
-					.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
-			int numRetries = conf
-					.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
+    /**
+     * Acquire read and write locks needed by the statement. The list of objects
+     * to be locked is obtained from the inputs and outputs populated by the
+     * compiler. The lock acquisition scheme is pretty simple: if all the locks
+     * cannot be obtained, error out. Deadlock is avoided by making sure that
+     * the locks are lexicographically sorted.
+     **/
+    public int acquireReadWriteLocks() {
+        try {
+            int sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+            int numRetries = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
 
-			boolean supportConcurrency = conf
-					.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
-			if (!supportConcurrency) {
-				return 0;
-			}
+            boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+            if (!supportConcurrency) {
+                return 0;
+            }
 
-			List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
+            List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
 
-			// Sort all the inputs, outputs.
-			// If a lock needs to be acquired on any partition, a read lock
-			// needs to be acquired on all
-			// its parents also
-			for (ReadEntity input : plan.getInputs()) {
-				if (input.getType() == ReadEntity.Type.TABLE) {
-					lockObjects.addAll(getLockObjects(input.getTable(), null,
-							HiveLockMode.SHARED));
-				} else {
-					lockObjects.addAll(getLockObjects(null,
-							input.getPartition(), HiveLockMode.SHARED));
-				}
-			}
+            // Sort all the inputs, outputs.
+            // If a lock needs to be acquired on any partition, a read lock
+            // needs to be acquired on all its parents also
+            for (ReadEntity input : plan.getInputs()) {
+                if (input.getType() == ReadEntity.Type.TABLE) {
+                    lockObjects.addAll(getLockObjects(input.getTable(), null, HiveLockMode.SHARED));
+                } else {
+                    lockObjects.addAll(getLockObjects(null, input.getPartition(), HiveLockMode.SHARED));
+                }
+            }
 
-			for (WriteEntity output : plan.getOutputs()) {
-				if (output.getTyp() == WriteEntity.Type.TABLE) {
-					lockObjects.addAll(getLockObjects(output.getTable(), null,
-							output.isComplete() ? HiveLockMode.EXCLUSIVE
-									: HiveLockMode.SHARED));
-				} else if (output.getTyp() == WriteEntity.Type.PARTITION) {
-					lockObjects.addAll(getLockObjects(null,
-							output.getPartition(), HiveLockMode.EXCLUSIVE));
-				}
-				// In case of dynamic queries, it is possible to have incomplete
-				// dummy partitions
-				else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
-					lockObjects.addAll(getLockObjects(null,
-							output.getPartition(), HiveLockMode.SHARED));
-				}
-			}
+            for (WriteEntity output : plan.getOutputs()) {
+                if (output.getTyp() == WriteEntity.Type.TABLE) {
+                    lockObjects.addAll(getLockObjects(output.getTable(), null,
+                            output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED));
+                } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
+                    lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.EXCLUSIVE));
+                }
+                // In case of dynamic queries, it is possible to have incomplete
+                // dummy partitions
+                else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
+                    lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.SHARED));
+                }
+            }
 
-			if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
-				return 0;
-			}
+            if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
+                return 0;
+            }
 
-			int ret = checkLockManager();
-			if (ret != 0) {
-				return ret;
-			}
+            int ret = checkLockManager();
+            if (ret != 0) {
+                return ret;
+            }
 
-			HiveLockObjectData lockData = new HiveLockObjectData(
-					plan.getQueryId(), String.valueOf(System
-							.currentTimeMillis()), "IMPLICIT");
+            HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(), String.valueOf(System
+                    .currentTimeMillis()), "IMPLICIT");
 
-			// Lock the database also
-			try {
-				Hive db = Hive.get(conf);
-				lockObjects.add(new HiveLockObj(new HiveLockObject(db
-						.getCurrentDatabase(), lockData), HiveLockMode.SHARED));
-			} catch (HiveException e) {
-				throw new SemanticException(e.getMessage());
-			}
+            // Lock the database also
+            try {
+                Hive db = Hive.get(conf);
+                lockObjects.add(new HiveLockObj(new HiveLockObject(db.getCurrentDatabase(), lockData),
+                        HiveLockMode.SHARED));
+            } catch (HiveException e) {
+                throw new SemanticException(e.getMessage());
+            }
 
-			ctx.setHiveLockMgr(hiveLockMgr);
-			List<HiveLock> hiveLocks = null;
+            ctx.setHiveLockMgr(hiveLockMgr);
+            List<HiveLock> hiveLocks = null;
 
-			int tryNum = 1;
-			do {
+            int tryNum = 1;
+            do {
 
-				// ctx.getHiveLockMgr();
-				// hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);
+                // ctx.getHiveLockMgr();
+                // hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);
 
-				if (hiveLocks != null) {
-					break;
-				}
+                if (hiveLocks != null) {
+                    break;
+                }
 
-				tryNum++;
-				try {
-					Thread.sleep(sleepTime);
-				} catch (InterruptedException e) {
-				}
-			} while (tryNum < numRetries);
+                tryNum++;
+                try {
+                    Thread.sleep(sleepTime);
+                } catch (InterruptedException e) {
+                }
+            } while (tryNum < numRetries);
 
-			if (hiveLocks == null) {
-				throw new SemanticException(
-						ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
-			} else {
-				ctx.setHiveLocks(hiveLocks);
-			}
+            if (hiveLocks == null) {
+                throw new SemanticException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
+            } else {
+                ctx.setHiveLocks(hiveLocks);
+            }
 
-			return (0);
-		} catch (SemanticException e) {
-			errorMessage = "FAILED: Error in acquiring locks: "
-					+ e.getMessage();
-			SQLState = ErrorMsg.findSQLState(e.getMessage());
-			console.printError(errorMessage, "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (10);
-		} catch (Exception e) {
-			errorMessage = "FAILED: Error in acquiring locks: "
-					+ e.getMessage();
-			SQLState = ErrorMsg.findSQLState(e.getMessage());
-			console.printError(errorMessage, "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (10);
-		}
-	}
+            return (0);
+        } catch (SemanticException e) {
+            errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        } catch (Exception e) {
+            errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        }
+    }
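+
+    // Note: with the lock(...) call inside the retry loop commented out,
+    // hiveLocks can never become non-null, so the loop runs out its retries
+    // and LOCK_CANNOT_BE_ACQUIRED is reported; consistently, run() below keeps
+    // its acquireReadWriteLocks() call commented out.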
 
-	/**
-	 * Release all the locks acquired implicitly by the statement. Note that the
-	 * locks acquired with 'keepAlive' set to True are not released.
-	 **/
-	private void releaseLocks() {
-		if (ctx != null && ctx.getHiveLockMgr() != null) {
-			try {
-				ctx.getHiveLockMgr().close();
-				ctx.setHiveLocks(null);
-			} catch (LockException e) {
-			}
-		}
-	}
+    /**
+     * Release all the locks acquired implicitly by the statement. Note that the
+     * locks acquired with 'keepAlive' set to True are not released.
+     **/
+    private void releaseLocks() {
+        if (ctx != null && ctx.getHiveLockMgr() != null) {
+            try {
+                ctx.getHiveLockMgr().close();
+                ctx.setHiveLocks(null);
+            } catch (LockException e) {
+            }
+        }
+    }
 
-	/**
-	 * @param hiveLocks
-	 *            list of hive locks to be released Release all the locks
-	 *            specified. If some of the locks have already been released,
-	 *            ignore them
-	 **/
-	private void releaseLocks(List<HiveLock> hiveLocks) {
-		if (hiveLocks != null) {
-			ctx.getHiveLockMgr().releaseLocks(hiveLocks);
-		}
-		ctx.setHiveLocks(null);
-	}
+    /**
+     * Release all the locks specified. If some of the locks have already been
+     * released, ignore them.
+     * 
+     * @param hiveLocks
+     *            list of hive locks to be released
+     **/
+    private void releaseLocks(List<HiveLock> hiveLocks) {
+        if (hiveLocks != null) {
+            ctx.getHiveLockMgr().releaseLocks(hiveLocks);
+        }
+        ctx.setHiveLocks(null);
+    }
 
-	public CommandProcessorResponse run(String command) {
-		errorMessage = null;
-		SQLState = null;
+    public CommandProcessorResponse run(String command) {
+        errorMessage = null;
+        SQLState = null;
 
-		int ret = compile(command);
-		if (ret != 0) {
-			// releaseLocks(ctx.getHiveLocks());
-			return new CommandProcessorResponse(ret, errorMessage, SQLState);
-		}
+        int ret = compile(command);
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
 
-		// ret = acquireReadWriteLocks();
-		if (ret != 0) {
-			// releaseLocks(ctx.getHiveLocks());
-			return new CommandProcessorResponse(ret, errorMessage, SQLState);
-		}
+        // ret = acquireReadWriteLocks();
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
 
-		ret = execute();
-		if (ret != 0) {
-			// releaseLocks(ctx.getHiveLocks());
-			return new CommandProcessorResponse(ret, errorMessage, SQLState);
-		}
+        ret = execute();
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
 
-		// releaseLocks(ctx.getHiveLocks());
-		return new CommandProcessorResponse(ret);
-	}
+        // releaseLocks(ctx.getHiveLocks());
+        return new CommandProcessorResponse(ret);
+    }
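+
+    // Usage (sketch, assuming an initialized HiveConf/SessionState):
+    //
+    //   Driver driver = new Driver(conf);
+    //   CommandProcessorResponse resp = driver.run("select count(1) from src");
+    //   if (resp.getResponseCode() != 0) {
+    //       System.err.println(resp.getErrorMessage());
+    //   }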
 
-	private List<AbstractSemanticAnalyzerHook> getSemanticAnalyzerHooks()
-			throws Exception {
-		ArrayList<AbstractSemanticAnalyzerHook> saHooks = new ArrayList<AbstractSemanticAnalyzerHook>();
-		String pestr = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
-		if (pestr == null) {
-			return saHooks;
-		}
-		pestr = pestr.trim();
-		if (pestr.equals("")) {
-			return saHooks;
-		}
+    private List<AbstractSemanticAnalyzerHook> getSemanticAnalyzerHooks() throws Exception {
+        ArrayList<AbstractSemanticAnalyzerHook> saHooks = new ArrayList<AbstractSemanticAnalyzerHook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
+        if (pestr == null) {
+            return saHooks;
+        }
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return saHooks;
+        }
 
-		String[] peClasses = pestr.split(",");
+        String[] peClasses = pestr.split(",");
 
-		for (String peClass : peClasses) {
-			try {
-				AbstractSemanticAnalyzerHook hook = HiveUtils
-						.getSemanticAnalyzerHook(conf, peClass);
-				saHooks.add(hook);
-			} catch (HiveException e) {
-				console.printError("Pre Exec Hook Class not found:"
-						+ e.getMessage());
-				throw e;
-			}
-		}
+        for (String peClass : peClasses) {
+            try {
+                AbstractSemanticAnalyzerHook hook = HiveUtils.getSemanticAnalyzerHook(conf, peClass);
+                saHooks.add(hook);
+            } catch (HiveException e) {
+                console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
 
-		return saHooks;
-	}
+        return saHooks;
+    }
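+
+    // Example (sketch): hooks come from the comma-separated value of
+    // ConfVars.SEMANTIC_ANALYZER_HOOK, e.g.
+    //
+    //   hive.semantic.analyzer.hook=com.example.AuditHook,com.example.MaskHook
+    //
+    // where each listed class (hypothetical names here) must be resolvable via
+    // HiveUtils.getSemanticAnalyzerHook(conf, className).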
 
-	private List<Hook> getPreExecHooks() throws Exception {
-		ArrayList<Hook> pehooks = new ArrayList<Hook>();
-		String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
-		pestr = pestr.trim();
-		if (pestr.equals("")) {
-			return pehooks;
-		}
+    private List<Hook> getPreExecHooks() throws Exception {
+        ArrayList<Hook> pehooks = new ArrayList<Hook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return pehooks;
+        }
 
-		String[] peClasses = pestr.split(",");
+        String[] peClasses = pestr.split(",");
 
-		for (String peClass : peClasses) {
-			try {
-				pehooks.add((Hook) Class.forName(peClass.trim(), true,
-						JavaUtils.getClassLoader()).newInstance());
-			} catch (ClassNotFoundException e) {
-				console.printError("Pre Exec Hook Class not found:"
-						+ e.getMessage());
-				throw e;
-			}
-		}
+        for (String peClass : peClasses) {
+            try {
+                pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+            } catch (ClassNotFoundException e) {
+                console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
 
-		return pehooks;
-	}
+        return pehooks;
+    }
 
-	private List<Hook> getPostExecHooks() throws Exception {
-		ArrayList<Hook> pehooks = new ArrayList<Hook>();
-		String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
-		pestr = pestr.trim();
-		if (pestr.equals("")) {
-			return pehooks;
-		}
+    private List<Hook> getPostExecHooks() throws Exception {
+        ArrayList<Hook> pehooks = new ArrayList<Hook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return pehooks;
+        }
 
-		String[] peClasses = pestr.split(",");
+        String[] peClasses = pestr.split(",");
 
-		for (String peClass : peClasses) {
-			try {
-				pehooks.add((Hook) Class.forName(peClass.trim(), true,
-						JavaUtils.getClassLoader()).newInstance());
-			} catch (ClassNotFoundException e) {
-				console.printError("Post Exec Hook Class not found:"
-						+ e.getMessage());
-				throw e;
-			}
-		}
+        for (String peClass : peClasses) {
+            try {
+                pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+            } catch (ClassNotFoundException e) {
+                console.printError("Post Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
 
-		return pehooks;
-	}
+        return pehooks;
+    }
 
-	public int execute() {
-		// execute hivesterix plan
-		if (hivesterix) {
-			hivesterix = false;
-			int ret = engine.executeJob();
-			if (ret != 0)
-				return ret;
-		}
+    public int execute() {
+        // execute hivesterix plan
+        if (hivesterix) {
+            hivesterix = false;
+            int ret = engine.executeJob();
+            if (ret != 0) {
+                return ret;
+            }
+        }
 
-		boolean noName = StringUtils.isEmpty(conf
-				.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
-		int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
+        boolean noName = StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
+        int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
 
-		String queryId = plan.getQueryId();
-		String queryStr = plan.getQueryStr();
+        String queryId = plan.getQueryId();
+        String queryStr = plan.getQueryStr();
 
-		conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
-		conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
-		maxthreads = HiveConf.getIntVar(conf,
-				HiveConf.ConfVars.EXECPARALLETHREADNUMBER);
+        conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+        conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
+        maxthreads = HiveConf.getIntVar(conf, HiveConf.ConfVars.EXECPARALLETHREADNUMBER);
 
-		try {
-			LOG.info("Starting command: " + queryStr);
+        try {
+            LOG.info("Starting command: " + queryStr);
 
-			plan.setStarted();
+            plan.setStarted();
 
-			if (SessionState.get() != null) {
-				SessionState
-						.get()
-						.getHiveHistory()
-						.startQuery(queryStr,
-								conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
-				SessionState.get().getHiveHistory().logPlanProgress(plan);
-			}
-			resStream = null;
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().startQuery(queryStr, conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+                SessionState.get().getHiveHistory().logPlanProgress(plan);
+            }
+            resStream = null;
 
-			HookContext hookContext = new HookContext(plan, conf);
+            HookContext hookContext = new HookContext(plan, conf);
 
-			for (Hook peh : getPreExecHooks()) {
-				if (peh instanceof ExecuteWithHookContext) {
-					((ExecuteWithHookContext) peh).run(hookContext);
-				} else if (peh instanceof PreExecute) {
-					((PreExecute) peh).run(SessionState.get(),
-							plan.getInputs(), plan.getOutputs(), ShimLoader
-									.getHadoopShims().getUGIForConf(conf));
-				}
-			}
+            for (Hook peh : getPreExecHooks()) {
+                if (peh instanceof ExecuteWithHookContext) {
+                    ((ExecuteWithHookContext) peh).run(hookContext);
+                } else if (peh instanceof PreExecute) {
+                    ((PreExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(), ShimLoader
+                            .getHadoopShims().getUGIForConf(conf));
+                }
+            }
 
-			int jobs = Utilities.getMRTasks(plan.getRootTasks()).size();
-			if (jobs > 0) {
-				console.printInfo("Total MapReduce jobs = " + jobs);
-			}
-			if (SessionState.get() != null) {
-				SessionState
-						.get()
-						.getHiveHistory()
-						.setQueryProperty(queryId, Keys.QUERY_NUM_TASKS,
-								String.valueOf(jobs));
-				SessionState.get().getHiveHistory()
-						.setIdToTableMap(plan.getIdToTableNameMap());
-			}
-			String jobname = Utilities.abbreviate(queryStr, maxlen - 6);
+            int jobs = Utilities.getMRTasks(plan.getRootTasks()).size();
+            if (jobs > 0) {
+                console.printInfo("Total MapReduce jobs = " + jobs);
+            }
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory()
+                        .setQueryProperty(queryId, Keys.QUERY_NUM_TASKS, String.valueOf(jobs));
+                SessionState.get().getHiveHistory().setIdToTableMap(plan.getIdToTableNameMap());
+            }
+            String jobname = Utilities.abbreviate(queryStr, maxlen - 6);
 
-			// A runtime that launches runnable tasks as separate Threads
-			// through
-			// TaskRunners
-			// As soon as a task isRunnable, it is put in a queue
-			// At any time, at most maxthreads tasks can be running
-			// The main thread polls the TaskRunners to check if they have
-			// finished.
+            // A runtime that launches runnable tasks as separate Threads
+            // through TaskRunners. As soon as a task isRunnable, it is put in
+            // a queue. At any time, at most maxthreads tasks can be running.
+            // The main thread polls the TaskRunners to check if they have
+            // finished.
 
-			Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
-			Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
+            Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
+            Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
 
-			DriverContext driverCxt = new DriverContext(runnable, ctx);
+            DriverContext driverCxt = new DriverContext(runnable, ctx);
 
-			// Add root Tasks to runnable
+            // Add root Tasks to runnable
 
-			for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
-				driverCxt.addToRunnable(tsk);
-			}
+            for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
+                driverCxt.addToRunnable(tsk);
+            }
 
-			// Loop while you either have tasks running, or tasks queued up
+            // Loop while you either have tasks running, or tasks queued up
 
-			while (running.size() != 0 || runnable.peek() != null) {
-				// Launch upto maxthreads tasks
-				while (runnable.peek() != null && running.size() < maxthreads) {
-					Task<? extends Serializable> tsk = runnable.remove();
-					console.printInfo("executing task " + tsk.getName());
-					launchTask(tsk, queryId, noName, running, jobname, jobs,
-							driverCxt);
-				}
+            while (running.size() != 0 || runnable.peek() != null) {
+                // Launch up to maxthreads tasks
+                while (runnable.peek() != null && running.size() < maxthreads) {
+                    Task<? extends Serializable> tsk = runnable.remove();
+                    console.printInfo("executing task " + tsk.getName());
+                    launchTask(tsk, queryId, noName, running, jobname, jobs, driverCxt);
+                }
 
-				// poll the Tasks to see which one completed
-				TaskResult tskRes = pollTasks(running.keySet());
-				TaskRunner tskRun = running.remove(tskRes);
-				Task<? extends Serializable> tsk = tskRun.getTask();
-				hookContext.addCompleteTask(tskRun);
+                // poll the Tasks to see which one completed
+                TaskResult tskRes = pollTasks(running.keySet());
+                TaskRunner tskRun = running.remove(tskRes);
+                Task<? extends Serializable> tsk = tskRun.getTask();
+                hookContext.addCompleteTask(tskRun);
 
-				int exitVal = tskRes.getExitVal();
-				if (exitVal != 0) {
-					Task<? extends Serializable> backupTask = tsk
-							.getAndInitBackupTask();
-					if (backupTask != null) {
-						errorMessage = "FAILED: Execution Error, return code "
-								+ exitVal + " from " + tsk.getClass().getName();
-						console.printError(errorMessage);
+                int exitVal = tskRes.getExitVal();
+                if (exitVal != 0) {
+                    Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
+                    if (backupTask != null) {
+                        errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
+                                + tsk.getClass().getName();
+                        console.printError(errorMessage);
 
-						errorMessage = "ATTEMPT: Execute BackupTask: "
-								+ backupTask.getClass().getName();
-						console.printError(errorMessage);
+                        errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
+                        console.printError(errorMessage);
 
-						// add backup task to runnable
-						if (DriverContext.isLaunchable(backupTask)) {
-							driverCxt.addToRunnable(backupTask);
-						}
-						continue;
+                        // add backup task to runnable
+                        if (DriverContext.isLaunchable(backupTask)) {
+                            driverCxt.addToRunnable(backupTask);
+                        }
+                        continue;
 
-					} else {
-						// TODO: This error messaging is not very informative.
-						// Fix that.
-						errorMessage = "FAILED: Execution Error, return code "
-								+ exitVal + " from " + tsk.getClass().getName();
-						SQLState = "08S01";
-						console.printError(errorMessage);
-						if (running.size() != 0) {
-							taskCleanup();
-						}
-						// in case we decided to run everything in local mode,
-						// restore the
-						// the jobtracker setting to its initial value
-						ctx.restoreOriginalTracker();
-						return 9;
-					}
-				}
+                    } else {
+                        // TODO: This error messaging is not very informative.
+                        // Fix that.
+                        errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
+                                + tsk.getClass().getName();
+                        SQLState = "08S01";
+                        console.printError(errorMessage);
+                        if (running.size() != 0) {
+                            taskCleanup();
+                        }
+                        // in case we decided to run everything in local mode,
+                        // restore the jobtracker setting to its initial value
+                        ctx.restoreOriginalTracker();
+                        return 9;
+                    }
+                }
 
-				if (SessionState.get() != null) {
-					SessionState
-							.get()
-							.getHiveHistory()
-							.setTaskProperty(queryId, tsk.getId(),
-									Keys.TASK_RET_CODE, String.valueOf(exitVal));
-					SessionState.get().getHiveHistory().endTask(queryId, tsk);
-				}
+                if (SessionState.get() != null) {
+                    SessionState.get().getHiveHistory()
+                            .setTaskProperty(queryId, tsk.getId(), Keys.TASK_RET_CODE, String.valueOf(exitVal));
+                    SessionState.get().getHiveHistory().endTask(queryId, tsk);
+                }
 
-				if (tsk.getChildTasks() != null) {
-					for (Task<? extends Serializable> child : tsk
-							.getChildTasks()) {
-						// hivesterix: don't check launchable condition
-						// if (DriverContext.isLaunchable(child)) {
-						driverCxt.addToRunnable(child);
-						// }
-					}
-				}
-			}
+                if (tsk.getChildTasks() != null) {
+                    for (Task<? extends Serializable> child : tsk.getChildTasks()) {
+                        // hivesterix: don't check launchable condition
+                        // if (DriverContext.isLaunchable(child)) {
+                        driverCxt.addToRunnable(child);
+                        // }
+                    }
+                }
+            }
 
-			// in case we decided to run everything in local mode, restore the
-			// the jobtracker setting to its initial value
-			ctx.restoreOriginalTracker();
+            // in case we decided to run everything in local mode, restore the
+            // jobtracker setting to its initial value
 
-			// remove incomplete outputs.
-			// Some incomplete outputs may be added at the beginning, for eg:
-			// for dynamic partitions.
-			// remove them
-			HashSet<WriteEntity> remOutputs = new HashSet<WriteEntity>();
-			for (WriteEntity output : plan.getOutputs()) {
-				if (!output.isComplete()) {
-					remOutputs.add(output);
-				}
-			}
+            // Remove incomplete outputs: some incomplete outputs may have been
+            // added at the beginning, e.g. for dynamic partitions; remove them.
+            HashSet<WriteEntity> remOutputs = new HashSet<WriteEntity>();
+            for (WriteEntity output : plan.getOutputs()) {
+                if (!output.isComplete()) {
+                    remOutputs.add(output);
+                }
+            }
 
-			for (WriteEntity output : remOutputs) {
-				plan.getOutputs().remove(output);
-			}
+            for (WriteEntity output : remOutputs) {
+                plan.getOutputs().remove(output);
+            }
 
-			// Get all the post execution hooks and execute them.
-			for (Hook peh : getPostExecHooks()) {
-				if (peh instanceof ExecuteWithHookContext) {
-					((ExecuteWithHookContext) peh).run(hookContext);
-				} else if (peh instanceof PostExecute) {
-					((PostExecute) peh)
-							.run(SessionState.get(),
-									plan.getInputs(),
-									plan.getOutputs(),
-									(SessionState.get() != null ? SessionState
-											.get().getLineageState()
-											.getLineageInfo() : null),
-									ShimLoader.getHadoopShims().getUGIForConf(
-											conf));
-				}
-			}
+            // Get all the post execution hooks and execute them.
+            for (Hook peh : getPostExecHooks()) {
+                if (peh instanceof ExecuteWithHookContext) {
+                    ((ExecuteWithHookContext) peh).run(hookContext);
+                } else if (peh instanceof PostExecute) {
+                    ((PostExecute) peh)
+                            .run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
+                                    (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo()
+                                            : null), ShimLoader.getHadoopShims().getUGIForConf(conf));
+                }
+            }
 
-			if (SessionState.get() != null) {
-				SessionState
-						.get()
-						.getHiveHistory()
-						.setQueryProperty(queryId, Keys.QUERY_RET_CODE,
-								String.valueOf(0));
-				SessionState.get().getHiveHistory().printRowCount(queryId);
-			}
-		} catch (Exception e) {
-			if (SessionState.get() != null) {
-				SessionState
-						.get()
-						.getHiveHistory()
-						.setQueryProperty(queryId, Keys.QUERY_RET_CODE,
-								String.valueOf(12));
-			}
-			// TODO: do better with handling types of Exception here
-			errorMessage = "FAILED: Hive Internal Error: "
-					+ Utilities.getNameMessage(e);
-			SQLState = "08S01";
-			console.printError(errorMessage + "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return (12);
-		} finally {
-			if (SessionState.get() != null) {
-				SessionState.get().getHiveHistory().endQuery(queryId);
-			}
-			if (noName) {
-				conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, "");
-			}
-		}
-		plan.setDone();
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(0));
+                SessionState.get().getHiveHistory().printRowCount(queryId);
+            }
+        } catch (Exception e) {
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(12));
+            }
+            // TODO: do better with handling types of Exception here
+            errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+            SQLState = "08S01";
+            console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (12);
+        } finally {
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().endQuery(queryId);
+            }
+            if (noName) {
+                conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, "");
+            }
+        }
+        plan.setDone();
 
-		if (SessionState.get() != null) {
-			try {
-				SessionState.get().getHiveHistory().logPlanProgress(plan);
-			} catch (Exception e) {
-			}
-		}
-		console.printInfo("OK");
+        if (SessionState.get() != null) {
+            try {
+                SessionState.get().getHiveHistory().logPlanProgress(plan);
+            } catch (Exception e) {
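+                // best-effort plan-progress logging; failures are ignored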
+            }
+        }
+        console.printInfo("OK");
 
-		return (0);
-	}
+        return (0);
+    }
 
-	/**
-	 * Launches a new task
-	 * 
-	 * @param tsk
-	 *            task being launched
-	 * @param queryId
-	 *            Id of the query containing the task
-	 * @param noName
-	 *            whether the task has a name set
-	 * @param running
-	 *            map from taskresults to taskrunners
-	 * @param jobname
-	 *            name of the task, if it is a map-reduce job
-	 * @param jobs
-	 *            number of map-reduce jobs
-	 * @param curJobNo
-	 *            the sequential number of the next map-reduce job
-	 * @return the updated number of last the map-reduce job launched
-	 */
+    /**
+     * Launches a new task
+     * 
+     * @param tsk
+     *            task being launched
+     * @param queryId
+     *            Id of the query containing the task
+     * @param noName
+     *            whether the task has a name set
+     * @param running
+     *            map from taskresults to taskrunners
+     * @param jobname
+     *            name of the task, if it is a map-reduce job
+     * @param jobs
+     *            number of map-reduce jobs
+     * @param cxt
+     *            the driver context, which tracks the sequential number of the
+     *            next map-reduce job
+     */
 
-	public void launchTask(Task<? extends Serializable> tsk, String queryId,
-			boolean noName, Map<TaskResult, TaskRunner> running,
-			String jobname, int jobs, DriverContext cxt) {
+    public void launchTask(Task<? extends Serializable> tsk, String queryId, boolean noName,
+            Map<TaskResult, TaskRunner> running, String jobname, int jobs, DriverContext cxt) {
 
-		if (SessionState.get() != null) {
-			SessionState.get().getHiveHistory()
-					.startTask(queryId, tsk, tsk.getClass().getName());
-		}
-		if (tsk.isMapRedTask() && !(tsk instanceof ConditionalTask)) {
-			if (noName) {
-				conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "("
-						+ tsk.getId() + ")");
-			}
-			cxt.incCurJobNo(1);
-			console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of "
-					+ jobs);
-		}
-		tsk.initialize(conf, plan, cxt);
-		TaskResult tskRes = new TaskResult();
-		TaskRunner tskRun = new TaskRunner(tsk, tskRes);
+        if (SessionState.get() != null) {
+            SessionState.get().getHiveHistory().startTask(queryId, tsk, tsk.getClass().getName());
+        }
+        if (tsk.isMapRedTask() && !(tsk instanceof ConditionalTask)) {
+            if (noName) {
+                conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "(" + tsk.getId() + ")");
+            }
+            cxt.incCurJobNo(1);
+            console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs);
+        }
+        tsk.initialize(conf, plan, cxt);
+        TaskResult tskRes = new TaskResult();
+        TaskRunner tskRun = new TaskRunner(tsk, tskRes);
 
-		// HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) &&
-		// Launch Task: hivesterix tweak
-		if (tsk instanceof MapRedTask || tsk instanceof StatsTask) {
-			// Launch it in the parallel mode, as a separate thread only for MR
-			// tasks
-			tskRes.setRunning(false);
-			tskRes.setExitVal(0);
-		} else if (tsk instanceof ConditionalTask) {
-			ConditionalTask condTask = (ConditionalTask) tsk;
-			ConditionalResolver crs = condTask.getResolver();
-			if (crs instanceof ConditionalResolverMergeFiles) {
-				tskRes.setRunning(false);
-				tskRes.setExitVal(0);
+        // hivesterix tweak to task launching; the original
+        // HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) check is
+        // not applied here.
+        if (tsk instanceof MapRedTask || tsk instanceof StatsTask) {
+            // Launch it in parallel mode, as a separate thread, only for MR
+            // tasks
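+            // With running=false and exit code 0, pollTasks() will treat this
+            // task as already complete, so the MR work itself is not executed
+            // by this loop.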
+            tskRes.setRunning(false);
+            tskRes.setExitVal(0);
+        } else if (tsk instanceof ConditionalTask) {
+            ConditionalTask condTask = (ConditionalTask) tsk;
+            ConditionalResolver crs = condTask.getResolver();
+            if (crs instanceof ConditionalResolverMergeFiles) {
+                tskRes.setRunning(false);
+                tskRes.setExitVal(0);
 
-				List<Task<? extends Serializable>> children = condTask
-						.getListTasks();
-				for (Task<? extends Serializable> child : children)
-					if (child instanceof MapRedTask)
-						cxt.addToRunnable(child);
-			}
-		} else {
-			tskRun.runSequential();
-		}
-		running.put(tskRes, tskRun);
-		return;
-	}
+                List<Task<? extends Serializable>> children = condTask.getListTasks();
+                for (Task<? extends Serializable> child : children) {
+                    if (child instanceof MapRedTask) {
+                        cxt.addToRunnable(child);
+                    }
+                }
+            }
+        } else {
+            tskRun.runSequential();
+        }
+        running.put(tskRes, tskRun);
+    }
 
-	/**
-	 * Cleans up remaining tasks in case of failure
-	 */
+    /**
+     * Cleans up remaining tasks in case of failure
+     */
 
-	public void taskCleanup() {
-		// The currently existing Shutdown hooks will be automatically called,
-		// killing the map-reduce processes.
-		// The non MR processes will be killed as well.
-		System.exit(9);
-	}
+    public void taskCleanup() {
+        // The currently existing Shutdown hooks will be automatically called,
+        // killing the map-reduce processes.
+        // The non-MR processes will be killed as well.
+        System.exit(9);
+    }
 
-	/**
-	 * Polls running tasks to see if a task has ended.
-	 * 
-	 * @param results
-	 *            Set of result objects for running tasks
-	 * @return The result object for any completed/failed task
-	 */
+    /**
+     * Polls running tasks to see if a task has ended.
+     * 
+     * @param results
+     *            Set of result objects for running tasks
+     * @return The result object for any completed/failed task
+     */
 
-	public TaskResult pollTasks(Set<TaskResult> results) {
-		Iterator<TaskResult> resultIterator = results.iterator();
-		while (true) {
-			while (resultIterator.hasNext()) {
-				TaskResult tskRes = resultIterator.next();
-				if (tskRes.isRunning() == false) {
-					return tskRes;
-				}
-			}
+    public TaskResult pollTasks(Set<TaskResult> results) {
+        Iterator<TaskResult> resultIterator = results.iterator();
+        while (true) {
+            while (resultIterator.hasNext()) {
+                TaskResult tskRes = resultIterator.next();
+                if (!tskRes.isRunning()) {
+                    return tskRes;
+                }
+            }
 
-			// In this loop, nothing was found
-			// Sleep 10 seconds and restart
-			try {
-				Thread.sleep(sleeptime);
-			} catch (InterruptedException ie) {
-				// Do Nothing
-				;
-			}
-			resultIterator = results.iterator();
-		}
-	}
+            // In this loop, nothing was found.
+            // Sleep for sleeptime milliseconds and restart.
+            try {
+                Thread.sleep(sleeptime);
+            } catch (InterruptedException ie) {
+                // Do nothing
+            }
+            resultIterator = results.iterator();
+        }
+    }
 
-	public boolean getResults(ArrayList<String> res) throws IOException {
-		if (plan != null && plan.getFetchTask() != null) {
-			FetchTask ft = plan.getFetchTask();
-			ft.setMaxRows(maxRows);
-			return ft.fetch(res);
-		}
+    public boolean getResults(ArrayList<String> res) throws IOException {
+        if (plan != null && plan.getFetchTask() != null) {
+            FetchTask ft = plan.getFetchTask();
+            ft.setMaxRows(maxRows);
+            return ft.fetch(res);
+        }
 
-		if (resStream == null) {
-			resStream = ctx.getStream();
-		}
-		if (resStream == null) {
-			return false;
-		}
+        if (resStream == null) {
+            resStream = ctx.getStream();
+        }
+        if (resStream == null) {
+            return false;
+        }
 
-		int numRows = 0;
-		String row = null;
+        int numRows = 0;
+        String row = null;
 
-		while (numRows < maxRows) {
-			if (resStream == null) {
-				if (numRows > 0) {
-					return true;
-				} else {
-					return false;
-				}
-			}
+        while (numRows < maxRows) {
+            if (resStream == null) {
+                return numRows > 0;
+            }
 
-			bos.reset();
-			Utilities.StreamStatus ss;
-			try {
-				ss = Utilities.readColumn(resStream, bos);
-				if (bos.getCount() > 0) {
-					row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
-				} else if (ss == Utilities.StreamStatus.TERMINATED) {
-					row = new String();
-				}
+            bos.reset();
+            Utilities.StreamStatus ss;
+            try {
+                ss = Utilities.readColumn(resStream, bos);
+                if (bos.getCount() > 0) {
+                    row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
+                } else if (ss == Utilities.StreamStatus.TERMINATED) {
+                    row = "";
+                }
 
-				if (row != null) {
-					numRows++;
-					res.add(row);
-				}
-			} catch (IOException e) {
-				console.printError("FAILED: Unexpected IO exception : "
-						+ e.getMessage());
-				res = null;
-				return false;
-			}
+                if (row != null) {
+                    numRows++;
+                    res.add(row);
+                }
+            } catch (IOException e) {
+                console.printError("FAILED: Unexpected IO exception : " + e.getMessage());
+                res = null;
+                return false;
+            }
 
-			if (ss == Utilities.StreamStatus.EOF) {
-				resStream = ctx.getStream();
-			}
-		}
-		return true;
-	}
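+            // EOF on the current stream: advance to the next result stream
+            // before continuing the row loop.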
+            if (ss == Utilities.StreamStatus.EOF) {
+                resStream = ctx.getStream();
+            }
+        }
+        return true;
+    }
 
-	public int close() {
-		try {
-			if (plan != null) {
-				FetchTask fetchTask = plan.getFetchTask();
-				if (null != fetchTask) {
-					try {
-						fetchTask.clearFetch();
-					} catch (Exception e) {
-						LOG.debug(" Exception while clearing the Fetch task ",
-								e);
-					}
-				}
-			}
-			if (ctx != null) {
-				ctx.clear();
-			}
-			if (null != resStream) {
-				try {
-					((FSDataInputStream) resStream).close();
-				} catch (Exception e) {
-					LOG.debug(" Exception while closing the resStream ", e);
-				}
-			}
-		} catch (Exception e) {
-			console.printError("FAILED: Hive Internal Error: "
-					+ Utilities.getNameMessage(e) + "\n"
-					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
-			return 13;
-		}
+    public int close() {
+        try {
+            if (plan != null) {
+                FetchTask fetchTask = plan.getFetchTask();
+                if (null != fetchTask) {
+                    try {
+                        fetchTask.clearFetch();
+                    } catch (Exception e) {
+                        LOG.debug(" Exception while clearing the Fetch task ", e);
+                    }
+                }
+            }
+            if (ctx != null) {
+                ctx.clear();
+            }
+            if (null != resStream) {
+                try {
+                    ((FSDataInputStream) resStream).close();
+                } catch (Exception e) {
+                    LOG.debug(" Exception while closing the resStream ", e);
+                }
+            }
+        } catch (Exception e) {
+            console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
+                    + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return 13;
+        }
 
-		return 0;
-	}
+        return 0;
+    }
 
-	public void destroy() {
-		releaseLocks();
-	}
+    public void destroy() {
+        releaseLocks();
+    }
 
-	public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
-			throws IOException {
-		return plan.getQueryPlan();
-	}
+    public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
+        return plan.getQueryPlan();
+    }
 
-	public int getTryCount() {
-		return tryCount;
-	}
+    public int getTryCount() {
+        return tryCount;
+    }
 
-	public void setTryCount(int tryCount) {
-		this.tryCount = tryCount;
-	}
+    public void setTryCount(int tryCount) {
+        this.tryCount = tryCount;
+    }
 }
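The execute() method reformatted above is, at its core, a small polling scheduler: root tasks are queued, at most maxthreads run at once, the main thread polls for completions, and each finished task enqueues its children. A minimal, self-contained sketch of that pattern (hypothetical names; illustrative only, not part of this patch):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;

// Minimal sketch of the scheduling pattern in Driver.execute(): keep at most
// maxthreads tasks running, poll for completions, then enqueue child tasks.
public class PollingSchedulerSketch {

    static class SketchTask implements Runnable {
        final String name;
        final List<SketchTask> children = new ArrayList<SketchTask>();
        volatile boolean running = true; // stands in for TaskResult.isRunning()

        SketchTask(String name) {
            this.name = name;
        }

        public void run() {
            System.out.println("executing task " + name);
            running = false; // like TaskResult.setRunning(false)
        }
    }

    public static void main(String[] args) throws InterruptedException {
        int maxthreads = 2;
        Queue<SketchTask> runnable = new LinkedList<SketchTask>();
        Map<SketchTask, Thread> running = new HashMap<SketchTask, Thread>();

        SketchTask root = new SketchTask("root");
        root.children.add(new SketchTask("child-1"));
        root.children.add(new SketchTask("child-2"));
        runnable.add(root);

        // Loop while tasks are either running or queued up, as in execute()
        while (!running.isEmpty() || runnable.peek() != null) {
            // Launch up to maxthreads tasks
            while (runnable.peek() != null && running.size() < maxthreads) {
                SketchTask tsk = runnable.remove();
                Thread runner = new Thread(tsk); // stands in for TaskRunner
                running.put(tsk, runner);
                runner.start();
            }
            // Poll for a completed task; the real code sleeps between scans
            SketchTask done = null;
            while (done == null) {
                for (SketchTask t : running.keySet()) {
                    if (!t.running) {
                        done = t;
                        break;
                    }
                }
                if (done == null) {
                    Thread.sleep(10);
                }
            }
            running.remove(done);
            runnable.addAll(done.children); // hivesterix enqueues all children
        }
        System.out.println("OK");
    }
}

Unlike the real code, the sketch polls with a fixed 10 ms sleep and omits error handling, backup tasks, and the hook machinery.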
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
index b174432..0f445f4 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
@@ -47,207 +47,187 @@
 
 /**
  * GenericUDAFAverage.
- * 
  */
 @Description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers")
 public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
 
-	static final Log LOG = LogFactory
-			.getLog(GenericUDAFAverage.class.getName());
+    static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName());
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		if (parameters.length != 1) {
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"Exactly one argument is expected.");
-		}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
 
-		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(0,
-					"Only primitive type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
-		case BYTE:
-		case SHORT:
-		case INT:
-		case LONG:
-		case FLOAT:
-		case DOUBLE:
-		case STRING:
-			return new GenericUDAFAverageEvaluator();
-		case BOOLEAN:
-		default:
-			throw new UDFArgumentTypeException(0,
-					"Only numeric or string type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-	}
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFAverageEvaluator();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
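+    // Note: STRING is accepted above; non-numeric strings surface later as a
+    // NumberFormatException in iterate(), which warns once and then ignores
+    // similar values.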
 
-	/**
-	 * GenericUDAFAverageEvaluator.
-	 * 
-	 */
-	public static class GenericUDAFAverageEvaluator extends
-			GenericUDAFEvaluator {
+    /**
+     * GenericUDAFAverageEvaluator.
+     */
+    public static class GenericUDAFAverageEvaluator extends GenericUDAFEvaluator {
 
-		// For PARTIAL1 and COMPLETE
-		PrimitiveObjectInspector inputOI;
+        // For PARTIAL1 and COMPLETE
+        PrimitiveObjectInspector inputOI;
 
-		// For PARTIAL2 and FINAL
-		StructObjectInspector soi;
-		StructField countField;
-		StructField sumField;
-		LongObjectInspector countFieldOI;
-		DoubleObjectInspector sumFieldOI;
+        // For PARTIAL2 and FINAL
+        StructObjectInspector soi;
+        StructField countField;
+        StructField sumField;
+        LongObjectInspector countFieldOI;
+        DoubleObjectInspector sumFieldOI;
 
-		// For PARTIAL1 and PARTIAL2
-		Object[] partialResult;
+        // For PARTIAL1 and PARTIAL2
+        Object[] partialResult;
 
-		// For FINAL and COMPLETE
-		DoubleWritable result;
+        // For FINAL and COMPLETE
+        DoubleWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			super.init(m, parameters);
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
 
-			// init input
-			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
-				inputOI = (PrimitiveObjectInspector) parameters[0];
-			} else {
-				soi = (StructObjectInspector) parameters[0];
-				countField = soi.getStructFieldRef("count");
-				sumField = soi.getStructFieldRef("sum");
-				countFieldOI = (LongObjectInspector) countField
-						.getFieldObjectInspector();
-				sumFieldOI = (DoubleObjectInspector) sumField
-						.getFieldObjectInspector();
-			}
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                inputOI = (PrimitiveObjectInspector) parameters[0];
+            } else {
+                soi = (StructObjectInspector) parameters[0];
+                countField = soi.getStructFieldRef("count");
+                sumField = soi.getStructFieldRef("sum");
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
+            }
 
-			// init output
-			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
-				// The output of a partial aggregation is a struct containing
-				// a "long" count and a "double" sum.
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a "long" count and a "double" sum.
 
-				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
-				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				ArrayList<String> fname = new ArrayList<String>();
-				fname.add("count");
-				fname.add("sum");
-				partialResult = new Object[2];
-				partialResult[0] = new LongWritable(0);
-				partialResult[1] = new DoubleWritable(0);
-				return ObjectInspectorFactory.getStandardStructObjectInspector(
-						fname, foi);
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("sum");
+                partialResult = new Object[2];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
 
-			} else {
-				result = new DoubleWritable(0);
-				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
-			}
-		}
+            } else {
+                result = new DoubleWritable(0);
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
 
-		static class AverageAgg implements SerializableBuffer {
-			long count;
-			double sum;
+        static class AverageAgg implements SerializableBuffer {
+            long count;
+            double sum;
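+            // Serialized layout: an 8-byte long count followed by an 8-byte
+            // double sum, matching the fixed offsets used below.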
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				count = BufferSerDeUtil.getLong(data, start);
-				start += 8;
-				sum = BufferSerDeUtil.getDouble(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                sum = BufferSerDeUtil.getDouble(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeLong(count, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(sum, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeLong(count);
-				output.writeDouble(sum);
-			}
-		};
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(sum);
+            }
+        }
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			AverageAgg result = new AverageAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            AverageAgg result = new AverageAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			AverageAgg myagg = (AverageAgg) agg;
-			myagg.count = 0;
-			myagg.sum = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            myagg.count = 0;
+            myagg.sum = 0;
+        }
 
-		boolean warned = false;
+        boolean warned = false;
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			Object p = parameters[0];
-			if (p != null) {
-				AverageAgg myagg = (AverageAgg) agg;
-				try {
-					double v = PrimitiveObjectInspectorUtils.getDouble(p,
-							inputOI);
-					myagg.count++;
-					myagg.sum += v;
-				} catch (NumberFormatException e) {
-					if (!warned) {
-						warned = true;
-						LOG.warn(getClass().getSimpleName() + " "
-								+ StringUtils.stringifyException(e));
-						LOG.warn(getClass().getSimpleName()
-								+ " ignoring similar exceptions.");
-					}
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            Object p = parameters[0];
+            if (p != null) {
+                AverageAgg myagg = (AverageAgg) agg;
+                try {
+                    double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
+                    myagg.count++;
+                    myagg.sum += v;
+                } catch (NumberFormatException e) {
+                    if (!warned) {
+                        warned = true;
+                        LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                        LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                    }
+                }
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			AverageAgg myagg = (AverageAgg) agg;
-			((LongWritable) partialResult[0]).set(myagg.count);
-			((DoubleWritable) partialResult[1]).set(myagg.sum);
-			return partialResult;
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.sum);
+            return partialResult;
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				AverageAgg myagg = (AverageAgg) agg;
-				Object partialCount = soi.getStructFieldData(partial,
-						countField);
-				Object partialSum = soi.getStructFieldData(partial, sumField);
-				myagg.count += countFieldOI.get(partialCount);
-				myagg.sum += sumFieldOI.get(partialSum);
-			}
-		}
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                AverageAgg myagg = (AverageAgg) agg;
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialSum = soi.getStructFieldData(partial, sumField);
+                myagg.count += countFieldOI.get(partialCount);
+                myagg.sum += sumFieldOI.get(partialSum);
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			AverageAgg myagg = (AverageAgg) agg;
-			if (myagg.count == 0) {
-				return null;
-			} else {
-				result.set(myagg.sum / myagg.count);
-				return result;
-			}
-		}
-	}
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            if (myagg.count == 0) {
+                return null;
+            } else {
+                result.set(myagg.sum / myagg.count);
+                return result;
+            }
+        }
+    }
 
 }
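GenericUDAFAverageEvaluator above carries (count, sum) in its partial buffer rather than a running mean, because counts and sums merge exactly across partitions while means do not. A toy sketch of the same two-phase lifecycle (hypothetical names; illustrative only, not part of this patch):

// Toy version of the avg partial-aggregation lifecycle: each partition folds
// rows into a (count, sum) partial, partials merge by addition, and the
// final step divides exactly once.
public class AvgPartialSketch {

    static class Partial {
        long count;
        double sum;
    }

    // PARTIAL1: fold raw rows into a partial buffer (cf. iterate())
    static Partial iterate(double[] rows) {
        Partial p = new Partial();
        for (double v : rows) {
            p.count++;
            p.sum += v;
        }
        return p;
    }

    // FINAL: merge partials, then finish (cf. merge()/terminate())
    static Double terminate(Partial[] partials) {
        Partial merged = new Partial();
        for (Partial p : partials) {
            merged.count += p.count;
            merged.sum += p.sum;
        }
        // null on empty input, matching terminate() when count == 0
        return merged.count == 0 ? null : Double.valueOf(merged.sum / merged.count);
    }

    public static void main(String[] args) {
        Partial a = iterate(new double[] { 1, 2, 3 });
        Partial b = iterate(new double[] { 4, 5 });
        System.out.println(terminate(new Partial[] { a, b })); // prints 3.0
    }
}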
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
index 716faac..2c4022e 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
@@ -51,378 +51,342 @@
  * Pebay, Sandia Labs and
  * "The Art of Computer Programming, volume 2: Seminumerical Algorithms", Donald
  * Knuth.
- * 
  * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg> my_n =
  * my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n
  * - my_n) : <covariance * n> vx_n = vx_(n-1) + (x_n - mx_n)(x_n - mx_(n-1)):
  * <variance * n> vy_n = vy_(n-1) + (y_n - my_n)(y_n - my_(n-1)): <variance * n>
- * 
  * Merge: c_(A,B) = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
  * vx_(A,B) = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B)
  * vy_(A,B) = vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
- * 
  */
 @Description(name = "corr", value = "_FUNC_(x,y) - Returns the Pearson coefficient of correlation\n"
-		+ "between a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
-		+ "Any pair with a NULL is ignored. If the function is applied to an empty set or\n"
-		+ "a singleton set, NULL will be returned. Otherwise, it computes the following:\n"
-		+ "   COVAR_POP(x,y)/(STDDEV_POP(x)*STDDEV_POP(y))\n"
-		+ "where neither x nor y is null,\n"
-		+ "COVAR_POP is the population covariance,\n"
-		+ "and STDDEV_POP is the population standard deviation.")
+        + "between a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
+        + "Any pair with a NULL is ignored. If the function is applied to an empty set or\n"
+        + "a singleton set, NULL will be returned. Otherwise, it computes the following:\n"
+        + "   COVAR_POP(x,y)/(STDDEV_POP(x)*STDDEV_POP(y))\n"
+        + "where neither x nor y is null,\n"
+        + "COVAR_POP is the population covariance,\n" + "and STDDEV_POP is the population standard deviation.")
 public class GenericUDAFCorrelation extends AbstractGenericUDAFResolver {
 
-	static final Log LOG = LogFactory.getLog(GenericUDAFCorrelation.class
-			.getName());
+    static final Log LOG = LogFactory.getLog(GenericUDAFCorrelation.class.getName());
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		if (parameters.length != 2) {
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"Exactly two arguments are expected.");
-		}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 2) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly two arguments are expected.");
+        }
 
-		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(0,
-					"Only primitive type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
 
-		if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(1,
-					"Only primitive type arguments are accepted but "
-							+ parameters[1].getTypeName() + " is passed.");
-		}
+        if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
+                    + parameters[1].getTypeName() + " is passed.");
+        }
 
-		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
-		case BYTE:
-		case SHORT:
-		case INT:
-		case LONG:
-		case FLOAT:
-		case DOUBLE:
-			switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
-			case BYTE:
-			case SHORT:
-			case INT:
-			case LONG:
-			case FLOAT:
-			case DOUBLE:
-				return new GenericUDAFCorrelationEvaluator();
-			case STRING:
-			case BOOLEAN:
-			default:
-				throw new UDFArgumentTypeException(1,
-						"Only numeric type arguments are accepted but "
-								+ parameters[1].getTypeName() + " is passed.");
-			}
-		case STRING:
-		case BOOLEAN:
-		default:
-			throw new UDFArgumentTypeException(0,
-					"Only numeric type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-	}
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+                switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
+                    case BYTE:
+                    case SHORT:
+                    case INT:
+                    case LONG:
+                    case FLOAT:
+                    case DOUBLE:
+                        return new GenericUDAFCorrelationEvaluator();
+                    case STRING:
+                    case BOOLEAN:
+                    default:
+                        throw new UDFArgumentTypeException(1, "Only numeric type arguments are accepted but "
+                                + parameters[1].getTypeName() + " is passed.");
+                }
+            case STRING:
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
 
-	/**
-	 * Evaluate the Pearson correlation coefficient using a stable one-pass
-	 * algorithm, based on work by Philippe Pébay and Donald Knuth.
-	 * 
-	 * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
-	 * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
-	 * mx_(n-1))*(y_n - my_n) : <covariance * n> vx_n = vx_(n-1) + (x_n -
-	 * mx_n)(x_n - mx_(n-1)): <variance * n> vy_n = vy_(n-1) + (y_n - my_n)(y_n
-	 * - my_(n-1)): <variance * n>
-	 * 
-	 * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X vx_(A,B)
-	 * = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B) vy_(A,B) =
-	 * vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
-	 * 
-	 */
-	public static class GenericUDAFCorrelationEvaluator extends
-			GenericUDAFEvaluator {
+    /**
+     * Evaluate the Pearson correlation coefficient using a stable one-pass
+     * algorithm, based on work by Philippe Pébay and Donald Knuth.
+     * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+     * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
+     * mx_(n-1))*(y_n - my_n) : <covariance * n> vx_n = vx_(n-1) + (x_n -
+     * mx_n)(x_n - mx_(n-1)): <variance * n> vy_n = vy_(n-1) + (y_n - my_n)(y_n
+     * - my_(n-1)): <variance * n>
+     * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X vx_(A,B)
+     * = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B) vy_(A,B) =
+     * vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+     */
+    public static class GenericUDAFCorrelationEvaluator extends GenericUDAFEvaluator {
 
-		// For PARTIAL1 and COMPLETE
-		private PrimitiveObjectInspector xInputOI;
-		private PrimitiveObjectInspector yInputOI;
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector xInputOI;
+        private PrimitiveObjectInspector yInputOI;
 
-		// For PARTIAL2 and FINAL
-		private StructObjectInspector soi;
-		private StructField countField;
-		private StructField xavgField;
-		private StructField yavgField;
-		private StructField xvarField;
-		private StructField yvarField;
-		private StructField covarField;
-		private LongObjectInspector countFieldOI;
-		private DoubleObjectInspector xavgFieldOI;
-		private DoubleObjectInspector yavgFieldOI;
-		private DoubleObjectInspector xvarFieldOI;
-		private DoubleObjectInspector yvarFieldOI;
-		private DoubleObjectInspector covarFieldOI;
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField xavgField;
+        private StructField yavgField;
+        private StructField xvarField;
+        private StructField yvarField;
+        private StructField covarField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector xavgFieldOI;
+        private DoubleObjectInspector yavgFieldOI;
+        private DoubleObjectInspector xvarFieldOI;
+        private DoubleObjectInspector yvarFieldOI;
+        private DoubleObjectInspector covarFieldOI;
 
-		// For PARTIAL1 and PARTIAL2
-		private Object[] partialResult;
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
 
-		// For FINAL and COMPLETE
-		private DoubleWritable result;
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			super.init(m, parameters);
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
 
-			// init input
-			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
-				assert (parameters.length == 2);
-				xInputOI = (PrimitiveObjectInspector) parameters[0];
-				yInputOI = (PrimitiveObjectInspector) parameters[1];
-			} else {
-				assert (parameters.length == 1);
-				soi = (StructObjectInspector) parameters[0];
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                assert (parameters.length == 2);
+                xInputOI = (PrimitiveObjectInspector) parameters[0];
+                yInputOI = (PrimitiveObjectInspector) parameters[1];
+            } else {
+                assert (parameters.length == 1);
+                soi = (StructObjectInspector) parameters[0];
 
-				countField = soi.getStructFieldRef("count");
-				xavgField = soi.getStructFieldRef("xavg");
-				yavgField = soi.getStructFieldRef("yavg");
-				xvarField = soi.getStructFieldRef("xvar");
-				yvarField = soi.getStructFieldRef("yvar");
-				covarField = soi.getStructFieldRef("covar");
+                countField = soi.getStructFieldRef("count");
+                xavgField = soi.getStructFieldRef("xavg");
+                yavgField = soi.getStructFieldRef("yavg");
+                xvarField = soi.getStructFieldRef("xvar");
+                yvarField = soi.getStructFieldRef("yvar");
+                covarField = soi.getStructFieldRef("covar");
 
-				countFieldOI = (LongObjectInspector) countField
-						.getFieldObjectInspector();
-				xavgFieldOI = (DoubleObjectInspector) xavgField
-						.getFieldObjectInspector();
-				yavgFieldOI = (DoubleObjectInspector) yavgField
-						.getFieldObjectInspector();
-				xvarFieldOI = (DoubleObjectInspector) xvarField
-						.getFieldObjectInspector();
-				yvarFieldOI = (DoubleObjectInspector) yvarField
-						.getFieldObjectInspector();
-				covarFieldOI = (DoubleObjectInspector) covarField
-						.getFieldObjectInspector();
-			}
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                xavgFieldOI = (DoubleObjectInspector) xavgField.getFieldObjectInspector();
+                yavgFieldOI = (DoubleObjectInspector) yavgField.getFieldObjectInspector();
+                xvarFieldOI = (DoubleObjectInspector) xvarField.getFieldObjectInspector();
+                yvarFieldOI = (DoubleObjectInspector) yvarField.getFieldObjectInspector();
+                covarFieldOI = (DoubleObjectInspector) covarField.getFieldObjectInspector();
+            }
 
-			// init output
-			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
-				// The output of a partial aggregation is a struct containing
-				// a long count, two double averages, two double variances,
-				// and a double covariance.
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count, two double averages, two double variances,
+                // and a double covariance.
 
-				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
 
-				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
 
-				ArrayList<String> fname = new ArrayList<String>();
-				fname.add("count");
-				fname.add("xavg");
-				fname.add("yavg");
-				fname.add("xvar");
-				fname.add("yvar");
-				fname.add("covar");
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("xavg");
+                fname.add("yavg");
+                fname.add("xvar");
+                fname.add("yvar");
+                fname.add("covar");
 
-				partialResult = new Object[6];
-				partialResult[0] = new LongWritable(0);
-				partialResult[1] = new DoubleWritable(0);
-				partialResult[2] = new DoubleWritable(0);
-				partialResult[3] = new DoubleWritable(0);
-				partialResult[4] = new DoubleWritable(0);
-				partialResult[5] = new DoubleWritable(0);
+                partialResult = new Object[6];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
+                partialResult[3] = new DoubleWritable(0);
+                partialResult[4] = new DoubleWritable(0);
+                partialResult[5] = new DoubleWritable(0);
 
-				return ObjectInspectorFactory.getStandardStructObjectInspector(
-						fname, foi);
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
 
-			} else {
-				setResult(new DoubleWritable(0));
-				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
-			}
-		}
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
 
-		static class StdAgg implements SerializableBuffer {
-			long count; // number n of elements
-			double xavg; // average of x elements
-			double yavg; // average of y elements
-			double xvar; // n times the variance of x elements
-			double yvar; // n times the variance of y elements
-			double covar; // n times the covariance
+        static class StdAgg implements SerializableBuffer {
+            long count; // number n of elements
+            double xavg; // average of x elements
+            double yavg; // average of y elements
+            double xvar; // n times the variance of x elements
+            double yvar; // n times the variance of y elements
+            double covar; // n times the covariance
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				count = BufferSerDeUtil.getLong(data, start);
-				start += 8;
-				xavg = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				yavg = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				xvar = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				yvar = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				covar = BufferSerDeUtil.getDouble(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                xavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                xvar = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yvar = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                covar = BufferSerDeUtil.getDouble(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeLong(count, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(xavg, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(yavg, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(xvar, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(yvar, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(covar, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xvar, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yvar, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(covar, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeLong(count);
-				output.writeDouble(xavg);
-				output.writeDouble(yavg);
-				output.writeDouble(xvar);
-				output.writeDouble(yvar);
-				output.writeDouble(covar);
-			}
-		};
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(xavg);
+                output.writeDouble(yavg);
+                output.writeDouble(xvar);
+                output.writeDouble(yvar);
+                output.writeDouble(covar);
+            }
+        }
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			StdAgg result = new StdAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			myagg.count = 0;
-			myagg.xavg = 0;
-			myagg.yavg = 0;
-			myagg.xvar = 0;
-			myagg.yvar = 0;
-			myagg.covar = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.xavg = 0;
+            myagg.yavg = 0;
+            myagg.xvar = 0;
+            myagg.yvar = 0;
+            myagg.covar = 0;
+        }
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 2);
-			Object px = parameters[0];
-			Object py = parameters[1];
-			if (px != null && py != null) {
-				StdAgg myagg = (StdAgg) agg;
-				double vx = PrimitiveObjectInspectorUtils.getDouble(px,
-						xInputOI);
-				double vy = PrimitiveObjectInspectorUtils.getDouble(py,
-						yInputOI);
-				double xavgOld = myagg.xavg;
-				double yavgOld = myagg.yavg;
-				myagg.count++;
-				myagg.xavg += (vx - xavgOld) / myagg.count;
-				myagg.yavg += (vy - yavgOld) / myagg.count;
-				if (myagg.count > 1) {
-					myagg.covar += (vx - xavgOld) * (vy - myagg.yavg);
-					myagg.xvar += (vx - xavgOld) * (vx - myagg.xavg);
-					myagg.yvar += (vy - yavgOld) * (vy - myagg.yavg);
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 2);
+            Object px = parameters[0];
+            Object py = parameters[1];
+            if (px != null && py != null) {
+                StdAgg myagg = (StdAgg) agg;
+                double vx = PrimitiveObjectInspectorUtils.getDouble(px, xInputOI);
+                double vy = PrimitiveObjectInspectorUtils.getDouble(py, yInputOI);
+                double xavgOld = myagg.xavg;
+                double yavgOld = myagg.yavg;
+                myagg.count++;
+                myagg.xavg += (vx - xavgOld) / myagg.count;
+                myagg.yavg += (vy - yavgOld) / myagg.count;
+                if (myagg.count > 1) {
+                    myagg.covar += (vx - xavgOld) * (vy - myagg.yavg);
+                    myagg.xvar += (vx - xavgOld) * (vx - myagg.xavg);
+                    myagg.yvar += (vy - yavgOld) * (vy - myagg.yavg);
+                }
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			((LongWritable) partialResult[0]).set(myagg.count);
-			((DoubleWritable) partialResult[1]).set(myagg.xavg);
-			((DoubleWritable) partialResult[2]).set(myagg.yavg);
-			((DoubleWritable) partialResult[3]).set(myagg.xvar);
-			((DoubleWritable) partialResult[4]).set(myagg.yvar);
-			((DoubleWritable) partialResult[5]).set(myagg.covar);
-			return partialResult;
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.xavg);
+            ((DoubleWritable) partialResult[2]).set(myagg.yavg);
+            ((DoubleWritable) partialResult[3]).set(myagg.xvar);
+            ((DoubleWritable) partialResult[4]).set(myagg.yvar);
+            ((DoubleWritable) partialResult[5]).set(myagg.covar);
+            return partialResult;
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				StdAgg myagg = (StdAgg) agg;
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
 
-				Object partialCount = soi.getStructFieldData(partial,
-						countField);
-				Object partialXAvg = soi.getStructFieldData(partial, xavgField);
-				Object partialYAvg = soi.getStructFieldData(partial, yavgField);
-				Object partialXVar = soi.getStructFieldData(partial, xvarField);
-				Object partialYVar = soi.getStructFieldData(partial, yvarField);
-				Object partialCovar = soi.getStructFieldData(partial,
-						covarField);
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+                Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+                Object partialXVar = soi.getStructFieldData(partial, xvarField);
+                Object partialYVar = soi.getStructFieldData(partial, yvarField);
+                Object partialCovar = soi.getStructFieldData(partial, covarField);
 
-				long nA = myagg.count;
-				long nB = countFieldOI.get(partialCount);
+                long nA = myagg.count;
+                long nB = countFieldOI.get(partialCount);
 
-				if (nA == 0) {
-					// Just copy the information since there is nothing so far
-					myagg.count = countFieldOI.get(partialCount);
-					myagg.xavg = xavgFieldOI.get(partialXAvg);
-					myagg.yavg = yavgFieldOI.get(partialYAvg);
-					myagg.xvar = xvarFieldOI.get(partialXVar);
-					myagg.yvar = yvarFieldOI.get(partialYVar);
-					myagg.covar = covarFieldOI.get(partialCovar);
-				}
+                if (nA == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.xavg = xavgFieldOI.get(partialXAvg);
+                    myagg.yavg = yavgFieldOI.get(partialYAvg);
+                    myagg.xvar = xvarFieldOI.get(partialXVar);
+                    myagg.yvar = yvarFieldOI.get(partialYVar);
+                    myagg.covar = covarFieldOI.get(partialCovar);
+                }
 
-				if (nA != 0 && nB != 0) {
-					// Merge the two partials
-					double xavgA = myagg.xavg;
-					double yavgA = myagg.yavg;
-					double xavgB = xavgFieldOI.get(partialXAvg);
-					double yavgB = yavgFieldOI.get(partialYAvg);
-					double xvarB = xvarFieldOI.get(partialXVar);
-					double yvarB = yvarFieldOI.get(partialYVar);
-					double covarB = covarFieldOI.get(partialCovar);
+                if (nA != 0 && nB != 0) {
+                    // Merge the two partials
+                    double xavgA = myagg.xavg;
+                    double yavgA = myagg.yavg;
+                    double xavgB = xavgFieldOI.get(partialXAvg);
+                    double yavgB = yavgFieldOI.get(partialYAvg);
+                    double xvarB = xvarFieldOI.get(partialXVar);
+                    double yvarB = yvarFieldOI.get(partialYVar);
+                    double covarB = covarFieldOI.get(partialCovar);
 
-					myagg.count += nB;
-					myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
-					myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
-					myagg.xvar += xvarB + (xavgA - xavgB) * (xavgA - xavgB)
-							* myagg.count;
-					myagg.yvar += yvarB + (yavgA - yavgB) * (yavgA - yavgB)
-							* myagg.count;
-					myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB)
-							* ((double) (nA * nB) / myagg.count);
-				}
-			}
-		}
+                    myagg.count += nB;
+                    myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+                    myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+                    myagg.xvar += xvarB + (xavgA - xavgB) * (xavgA - xavgB) * myagg.count;
+                    myagg.yvar += yvarB + (yavgA - yavgB) * (yavgA - yavgB) * myagg.count;
+                    myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB) * ((double) (nA * nB) / myagg.count);
+                }
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
 
-			if (myagg.count < 2) { // SQL standard - return null for zero or one
-									// pair
-				return null;
-			} else {
-				getResult().set(
-						myagg.covar / java.lang.Math.sqrt(myagg.xvar)
-								/ java.lang.Math.sqrt(myagg.yvar));
-				return getResult();
-			}
-		}
+            if (myagg.count < 2) { // SQL standard - return null for zero or one pair
+                return null;
+            } else {
+                getResult().set(myagg.covar / java.lang.Math.sqrt(myagg.xvar) / java.lang.Math.sqrt(myagg.yvar));
+                return getResult();
+            }
+        }
 
-		public void setResult(DoubleWritable result) {
-			this.result = result;
-		}
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
 
-		public DoubleWritable getResult() {
-			return result;
-		}
-	}
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
 
 }
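
For reference, the running-moment updates the correlation evaluator above applies can be exercised in isolation. The following is a minimal standalone sketch, not part of the hivesterix sources; the class name and main() driver are illustrative only. It folds (x, y) pairs through the same updates as iterate() and finishes with corr = covar / sqrt(xvar) / sqrt(yvar), as in terminate():

// Minimal sketch of the one-pass correlation update used by the evaluator
// above. Illustrative only; not part of the hivesterix sources.
public class CorrSketch {
    long count;            // n
    double xavg, yavg;     // running means
    double xvar, yvar;     // n times the variance of x / y
    double covar;          // n times the covariance

    void iterate(double vx, double vy) {
        double xavgOld = xavg, yavgOld = yavg;
        count++;
        xavg += (vx - xavgOld) / count;
        yavg += (vy - yavgOld) / count;
        if (count > 1) {
            covar += (vx - xavgOld) * (vy - yavg);
            xvar += (vx - xavgOld) * (vx - xavg);
            yvar += (vy - yavgOld) * (vy - yavg);
        }
    }

    Double terminate() {
        if (count < 2) {
            return null; // SQL standard: undefined for fewer than two pairs
        }
        return covar / Math.sqrt(xvar) / Math.sqrt(yvar);
    }

    public static void main(String[] args) {
        CorrSketch agg = new CorrSketch();
        double[][] pairs = { { 1, 2 }, { 2, 4 }, { 3, 6 } }; // perfectly correlated
        for (double[] p : pairs) {
            agg.iterate(p[0], p[1]);
        }
        System.out.println(agg.terminate()); // prints 1.0 (up to rounding)
    }
}

Note that covar, xvar and yvar hold n times the covariance and variances; the n factors cancel in the final ratio, so terminate() never needs to divide by count.
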
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
index 4160d5b..dc5eef0 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -37,146 +37,134 @@
  * This class implements the COUNT aggregation function as in SQL.
  */
 @Description(name = "count", value = "_FUNC_(*) - Returns the total number of retrieved rows, including "
-		+ "rows containing NULL values.\n"
+        + "rows containing NULL values.\n"
 
-		+ "_FUNC_(expr) - Returns the number of rows for which the supplied "
-		+ "expression is non-NULL.\n"
+        + "_FUNC_(expr) - Returns the number of rows for which the supplied " + "expression is non-NULL.\n"
 
-		+ "_FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for "
-		+ "which the supplied expression(s) are unique and non-NULL.")
+        + "_FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for "
+        + "which the supplied expression(s) are unique and non-NULL.")
 public class GenericUDAFCount implements GenericUDAFResolver2 {
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		// This method implementation is preserved for backward compatibility.
-		return new GenericUDAFCountEvaluator();
-	}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        // This method implementation is preserved for backward compatibility.
+        return new GenericUDAFCountEvaluator();
+    }
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo paramInfo)
-			throws SemanticException {
+    @Override
+    public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo paramInfo) throws SemanticException {
 
-		TypeInfo[] parameters = paramInfo.getParameters();
+        TypeInfo[] parameters = paramInfo.getParameters();
 
-		if (parameters.length == 0) {
-			if (!paramInfo.isAllColumns()) {
-				throw new UDFArgumentException("Argument expected");
-			}
-			assert !paramInfo.isDistinct() : "DISTINCT not supported with *";
-		} else {
-			if (parameters.length > 1 && !paramInfo.isDistinct()) {
-				throw new UDFArgumentException(
-						"DISTINCT keyword must be specified");
-			}
-			assert !paramInfo.isAllColumns() : "* not supported in expression list";
-		}
+        if (parameters.length == 0) {
+            if (!paramInfo.isAllColumns()) {
+                throw new UDFArgumentException("Argument expected");
+            }
+            assert !paramInfo.isDistinct() : "DISTINCT not supported with *";
+        } else {
+            if (parameters.length > 1 && !paramInfo.isDistinct()) {
+                throw new UDFArgumentException("DISTINCT keyword must be specified");
+            }
+            assert !paramInfo.isAllColumns() : "* not supported in expression list";
+        }
 
-		return new GenericUDAFCountEvaluator().setCountAllColumns(paramInfo
-				.isAllColumns());
-	}
+        return new GenericUDAFCountEvaluator().setCountAllColumns(paramInfo.isAllColumns());
+    }
 
-	/**
-	 * GenericUDAFCountEvaluator.
-	 * 
-	 */
-	public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
-		private boolean countAllColumns = false;
-		private LongObjectInspector partialCountAggOI;
-		private LongWritable result;
+    /**
+     * GenericUDAFCountEvaluator.
+     */
+    public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
+        private boolean countAllColumns = false;
+        private LongObjectInspector partialCountAggOI;
+        private LongWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			super.init(m, parameters);
-			partialCountAggOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
-			result = new LongWritable(0);
-			return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
-		}
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
+            partialCountAggOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+            result = new LongWritable(0);
+            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+        }
 
-		private GenericUDAFCountEvaluator setCountAllColumns(
-				boolean countAllCols) {
-			countAllColumns = countAllCols;
-			return this;
-		}
+        private GenericUDAFCountEvaluator setCountAllColumns(boolean countAllCols) {
+            countAllColumns = countAllCols;
+            return this;
+        }
 
-		/** class for storing count value. */
-		static class CountAgg implements SerializableBuffer {
-			long value;
+        /** class for storing count value. */
+        static class CountAgg implements SerializableBuffer {
+            long value;
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				value = BufferSerDeUtil.getLong(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                value = BufferSerDeUtil.getLong(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeLong(value, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(value, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeLong(value);
-			}
-		}
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(value);
+            }
+        }
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			CountAgg buffer = new CountAgg();
-			reset(buffer);
-			return buffer;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            CountAgg buffer = new CountAgg();
+            reset(buffer);
+            return buffer;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			((CountAgg) agg).value = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            ((CountAgg) agg).value = 0;
+        }
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			// parameters == null means the input table/split is empty
-			if (parameters == null) {
-				return;
-			}
-			if (countAllColumns) {
-				assert parameters.length == 0;
-				((CountAgg) agg).value++;
-			} else {
-				assert parameters.length > 0;
-				boolean countThisRow = true;
-				for (Object nextParam : parameters) {
-					if (nextParam == null) {
-						countThisRow = false;
-						break;
-					}
-				}
-				if (countThisRow) {
-					((CountAgg) agg).value++;
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            // parameters == null means the input table/split is empty
+            if (parameters == null) {
+                return;
+            }
+            if (countAllColumns) {
+                assert parameters.length == 0;
+                ((CountAgg) agg).value++;
+            } else {
+                assert parameters.length > 0;
+                boolean countThisRow = true;
+                for (Object nextParam : parameters) {
+                    if (nextParam == null) {
+                        countThisRow = false;
+                        break;
+                    }
+                }
+                if (countThisRow) {
+                    ((CountAgg) agg).value++;
+                }
+            }
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				long p = partialCountAggOI.get(partial);
-				((CountAgg) agg).value += p;
-			}
-		}
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                long p = partialCountAggOI.get(partial);
+                ((CountAgg) agg).value += p;
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			result.set(((CountAgg) agg).value);
-			return result;
-		}
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            result.set(((CountAgg) agg).value);
+            return result;
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			return terminate(agg);
-		}
-	}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
+    }
 }
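
All of the SerializableBuffer implementations in this commit serialize their fields at fixed byte offsets through BufferSerDeUtil, mirroring the DataOutput overload. BufferSerDeUtil belongs to the hivesterix runtime and is not shown here; as a rough stand-in, the layout CountAgg uses can be illustrated with java.nio.ByteBuffer, assuming the same big-endian encoding that DataOutput.writeLong produces:

import java.nio.ByteBuffer;

// Illustrative stand-in for the fixed-offset buffer layout the
// SerializableBuffer implementations above rely on. Assumes a big-endian
// layout, matching what the DataOutput-based overloads write.
public class CountAggLayoutSketch {
    public static void main(String[] args) {
        long value = 42L;

        // serializeAggBuffer(byte[], start, len): one 8-byte long at 'start'
        byte[] data = new byte[8];
        ByteBuffer.wrap(data).putLong(0, value);

        // deSerializeAggBuffer(byte[], start, len): read it back
        long roundTripped = ByteBuffer.wrap(data).getLong(0);
        System.out.println(roundTripped); // 42
    }
}

Multi-field buffers such as StdAgg follow the same convention, advancing the offset by 8 bytes per long or double (and by 1 byte per boolean in the sum aggregators).
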
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
index 11d9dc3..0c4448b 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
@@ -48,325 +48,294 @@
  * Compute the covariance covar_pop(x, y), using the following one-pass method
  * (ref. "Formulas for Robust, One-Pass Parallel Computation of Covariances and
  * Arbitrary-Order Statistical Moments", Philippe Pebay, Sandia Labs):
- * 
  * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg> my_n =
  * my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n
  * - my_n) : <covariance * n>
- * 
  * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
- * 
  */
 @Description(name = "covariance,covar_pop", value = "_FUNC_(x,y) - Returns the population covariance of a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
-		+ "Any pair with a NULL is ignored. If the function is applied to an empty set, NULL\n"
-		+ "will be returned. Otherwise, it computes the following:\n"
-		+ "   (SUM(x*y)-SUM(x)*SUM(y)/COUNT(x,y))/COUNT(x,y)\n"
-		+ "where neither x nor y is null.")
+        + "Any pair with a NULL is ignored. If the function is applied to an empty set, NULL\n"
+        + "will be returned. Otherwise, it computes the following:\n"
+        + "   (SUM(x*y)-SUM(x)*SUM(y)/COUNT(x,y))/COUNT(x,y)\n" + "where neither x nor y is null.")
 public class GenericUDAFCovariance extends AbstractGenericUDAFResolver {
 
-	static final Log LOG = LogFactory.getLog(GenericUDAFCovariance.class
-			.getName());
+    static final Log LOG = LogFactory.getLog(GenericUDAFCovariance.class.getName());
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		if (parameters.length != 2) {
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"Exactly two arguments are expected.");
-		}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 2) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly two arguments are expected.");
+        }
 
-		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(0,
-					"Only primitive type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
 
-		if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(1,
-					"Only primitive type arguments are accepted but "
-							+ parameters[1].getTypeName() + " is passed.");
-		}
+        if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
+                    + parameters[1].getTypeName() + " is passed.");
+        }
 
-		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
-		case BYTE:
-		case SHORT:
-		case INT:
-		case LONG:
-		case FLOAT:
-		case DOUBLE:
-			switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
-			case BYTE:
-			case SHORT:
-			case INT:
-			case LONG:
-			case FLOAT:
-			case DOUBLE:
-				return new GenericUDAFCovarianceEvaluator();
-			case STRING:
-			case BOOLEAN:
-			default:
-				throw new UDFArgumentTypeException(1,
-						"Only numeric or string type arguments are accepted but "
-								+ parameters[1].getTypeName() + " is passed.");
-			}
-		case STRING:
-		case BOOLEAN:
-		default:
-			throw new UDFArgumentTypeException(0,
-					"Only numeric or string type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-	}
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+                switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
+                    case BYTE:
+                    case SHORT:
+                    case INT:
+                    case LONG:
+                    case FLOAT:
+                    case DOUBLE:
+                        return new GenericUDAFCovarianceEvaluator();
+                    case STRING:
+                    case BOOLEAN:
+                    default:
+                        throw new UDFArgumentTypeException(1, "Only numeric or string type arguments are accepted but "
+                                + parameters[1].getTypeName() + " is passed.");
+                }
+            case STRING:
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
 
-	/**
-	 * Evaluate the variance using the algorithm described in
-	 * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance,
-	 * presumably by Pébay, Philippe (2008), in "Formulas for Robust, One-Pass
-	 * Parallel Computation of Covariances and Arbitrary-Order Statistical
-	 * Moments", Technical Report SAND2008-6212, Sandia National Laboratories,
-	 * http://infoserve.sandia.gov/sand_doc/2008/086212.pdf
-	 * 
-	 * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
-	 * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
-	 * mx_(n-1))*(y_n - my_n) : <covariance * n>
-	 * 
-	 * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
-	 * 
-	 * This one-pass algorithm is stable.
-	 * 
-	 */
-	public static class GenericUDAFCovarianceEvaluator extends
-			GenericUDAFEvaluator {
+    /**
+     * Evaluate the variance using the algorithm described in
+     * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance,
+     * presumably by Pébay, Philippe (2008), in "Formulas for Robust, One-Pass
+     * Parallel Computation of Covariances and Arbitrary-Order Statistical
+     * Moments", Technical Report SAND2008-6212, Sandia National Laboratories,
+     * http://infoserve.sandia.gov/sand_doc/2008/086212.pdf
+     * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+     * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
+     * mx_(n-1))*(y_n - my_n) : <covariance * n>
+     * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
+     * This one-pass algorithm is stable.
+     */
+    public static class GenericUDAFCovarianceEvaluator extends GenericUDAFEvaluator {
 
-		// For PARTIAL1 and COMPLETE
-		private PrimitiveObjectInspector xInputOI;
-		private PrimitiveObjectInspector yInputOI;
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector xInputOI;
+        private PrimitiveObjectInspector yInputOI;
 
-		// For PARTIAL2 and FINAL
-		private StructObjectInspector soi;
-		private StructField countField;
-		private StructField xavgField;
-		private StructField yavgField;
-		private StructField covarField;
-		private LongObjectInspector countFieldOI;
-		private DoubleObjectInspector xavgFieldOI;
-		private DoubleObjectInspector yavgFieldOI;
-		private DoubleObjectInspector covarFieldOI;
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField xavgField;
+        private StructField yavgField;
+        private StructField covarField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector xavgFieldOI;
+        private DoubleObjectInspector yavgFieldOI;
+        private DoubleObjectInspector covarFieldOI;
 
-		// For PARTIAL1 and PARTIAL2
-		private Object[] partialResult;
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
 
-		// For FINAL and COMPLETE
-		private DoubleWritable result;
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			super.init(m, parameters);
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
 
-			// init input
-			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
-				assert (parameters.length == 2);
-				xInputOI = (PrimitiveObjectInspector) parameters[0];
-				yInputOI = (PrimitiveObjectInspector) parameters[1];
-			} else {
-				assert (parameters.length == 1);
-				soi = (StructObjectInspector) parameters[0];
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                assert (parameters.length == 2);
+                xInputOI = (PrimitiveObjectInspector) parameters[0];
+                yInputOI = (PrimitiveObjectInspector) parameters[1];
+            } else {
+                assert (parameters.length == 1);
+                soi = (StructObjectInspector) parameters[0];
 
-				countField = soi.getStructFieldRef("count");
-				xavgField = soi.getStructFieldRef("xavg");
-				yavgField = soi.getStructFieldRef("yavg");
-				covarField = soi.getStructFieldRef("covar");
+                countField = soi.getStructFieldRef("count");
+                xavgField = soi.getStructFieldRef("xavg");
+                yavgField = soi.getStructFieldRef("yavg");
+                covarField = soi.getStructFieldRef("covar");
 
-				countFieldOI = (LongObjectInspector) countField
-						.getFieldObjectInspector();
-				xavgFieldOI = (DoubleObjectInspector) xavgField
-						.getFieldObjectInspector();
-				yavgFieldOI = (DoubleObjectInspector) yavgField
-						.getFieldObjectInspector();
-				covarFieldOI = (DoubleObjectInspector) covarField
-						.getFieldObjectInspector();
-			}
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                xavgFieldOI = (DoubleObjectInspector) xavgField.getFieldObjectInspector();
+                yavgFieldOI = (DoubleObjectInspector) yavgField.getFieldObjectInspector();
+                covarFieldOI = (DoubleObjectInspector) covarField.getFieldObjectInspector();
+            }
 
-			// init output
-			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
-				// The output of a partial aggregation is a struct containing
-				// a long count, two double averages, and a double covariance.
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count, two double averages, and a double covariance.
 
-				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
 
-				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
 
-				ArrayList<String> fname = new ArrayList<String>();
-				fname.add("count");
-				fname.add("xavg");
-				fname.add("yavg");
-				fname.add("covar");
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("xavg");
+                fname.add("yavg");
+                fname.add("covar");
 
-				partialResult = new Object[4];
-				partialResult[0] = new LongWritable(0);
-				partialResult[1] = new DoubleWritable(0);
-				partialResult[2] = new DoubleWritable(0);
-				partialResult[3] = new DoubleWritable(0);
+                partialResult = new Object[4];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
+                partialResult[3] = new DoubleWritable(0);
 
-				return ObjectInspectorFactory.getStandardStructObjectInspector(
-						fname, foi);
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
 
-			} else {
-				setResult(new DoubleWritable(0));
-				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
-			}
-		}
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
 
-		static class StdAgg implements SerializableBuffer {
-			long count; // number n of elements
-			double xavg; // average of x elements
-			double yavg; // average of y elements
-			double covar; // n times the covariance
+        static class StdAgg implements SerializableBuffer {
+            long count; // number n of elements
+            double xavg; // average of x elements
+            double yavg; // average of y elements
+            double covar; // n times the covariance
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				count = BufferSerDeUtil.getLong(data, start);
-				start += 8;
-				xavg = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				yavg = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				covar = BufferSerDeUtil.getDouble(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                xavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                covar = BufferSerDeUtil.getDouble(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeLong(count, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(xavg, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(yavg, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(covar, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(covar, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeLong(count);
-				output.writeDouble(xavg);
-				output.writeDouble(yavg);
-				output.writeDouble(covar);
-			}
-		};
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(xavg);
+                output.writeDouble(yavg);
+                output.writeDouble(covar);
+            }
+        };
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			StdAgg result = new StdAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			myagg.count = 0;
-			myagg.xavg = 0;
-			myagg.yavg = 0;
-			myagg.covar = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.xavg = 0;
+            myagg.yavg = 0;
+            myagg.covar = 0;
+        }
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 2);
-			Object px = parameters[0];
-			Object py = parameters[1];
-			if (px != null && py != null) {
-				StdAgg myagg = (StdAgg) agg;
-				double vx = PrimitiveObjectInspectorUtils.getDouble(px,
-						xInputOI);
-				double vy = PrimitiveObjectInspectorUtils.getDouble(py,
-						yInputOI);
-				myagg.count++;
-				myagg.yavg = myagg.yavg + (vy - myagg.yavg) / myagg.count;
-				if (myagg.count > 1) {
-					myagg.covar += (vx - myagg.xavg) * (vy - myagg.yavg);
-				}
-				myagg.xavg = myagg.xavg + (vx - myagg.xavg) / myagg.count;
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 2);
+            Object px = parameters[0];
+            Object py = parameters[1];
+            if (px != null && py != null) {
+                StdAgg myagg = (StdAgg) agg;
+                double vx = PrimitiveObjectInspectorUtils.getDouble(px, xInputOI);
+                double vy = PrimitiveObjectInspectorUtils.getDouble(py, yInputOI);
+                myagg.count++;
+                myagg.yavg = myagg.yavg + (vy - myagg.yavg) / myagg.count;
+                if (myagg.count > 1) {
+                    myagg.covar += (vx - myagg.xavg) * (vy - myagg.yavg);
+                }
+                myagg.xavg = myagg.xavg + (vx - myagg.xavg) / myagg.count;
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			((LongWritable) partialResult[0]).set(myagg.count);
-			((DoubleWritable) partialResult[1]).set(myagg.xavg);
-			((DoubleWritable) partialResult[2]).set(myagg.yavg);
-			((DoubleWritable) partialResult[3]).set(myagg.covar);
-			return partialResult;
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.xavg);
+            ((DoubleWritable) partialResult[2]).set(myagg.yavg);
+            ((DoubleWritable) partialResult[3]).set(myagg.covar);
+            return partialResult;
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				StdAgg myagg = (StdAgg) agg;
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
 
-				Object partialCount = soi.getStructFieldData(partial,
-						countField);
-				Object partialXAvg = soi.getStructFieldData(partial, xavgField);
-				Object partialYAvg = soi.getStructFieldData(partial, yavgField);
-				Object partialCovar = soi.getStructFieldData(partial,
-						covarField);
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+                Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+                Object partialCovar = soi.getStructFieldData(partial, covarField);
 
-				long nA = myagg.count;
-				long nB = countFieldOI.get(partialCount);
+                long nA = myagg.count;
+                long nB = countFieldOI.get(partialCount);
 
-				if (nA == 0) {
-					// Just copy the information since there is nothing so far
-					myagg.count = countFieldOI.get(partialCount);
-					myagg.xavg = xavgFieldOI.get(partialXAvg);
-					myagg.yavg = yavgFieldOI.get(partialYAvg);
-					myagg.covar = covarFieldOI.get(partialCovar);
-				}
+                if (nA == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.xavg = xavgFieldOI.get(partialXAvg);
+                    myagg.yavg = yavgFieldOI.get(partialYAvg);
+                    myagg.covar = covarFieldOI.get(partialCovar);
+                }
 
-				if (nA != 0 && nB != 0) {
-					// Merge the two partials
-					double xavgA = myagg.xavg;
-					double yavgA = myagg.yavg;
-					double xavgB = xavgFieldOI.get(partialXAvg);
-					double yavgB = yavgFieldOI.get(partialYAvg);
-					double covarB = covarFieldOI.get(partialCovar);
+                if (nA != 0 && nB != 0) {
+                    // Merge the two partials
+                    double xavgA = myagg.xavg;
+                    double yavgA = myagg.yavg;
+                    double xavgB = xavgFieldOI.get(partialXAvg);
+                    double yavgB = yavgFieldOI.get(partialYAvg);
+                    double covarB = covarFieldOI.get(partialCovar);
 
-					myagg.count += nB;
-					myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
-					myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
-					myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB)
-							* ((double) (nA * nB) / myagg.count);
-				}
-			}
-		}
+                    myagg.count += nB;
+                    myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+                    myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+                    myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB) * ((double) (nA * nB) / myagg.count);
+                }
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
 
-			if (myagg.count == 0) { // SQL standard - return null for zero
-									// elements
-				return null;
-			} else {
-				getResult().set(myagg.covar / (myagg.count));
-				return getResult();
-			}
-		}
+            if (myagg.count == 0) { // SQL standard - return null for zero elements
+                return null;
+            } else {
+                getResult().set(myagg.covar / (myagg.count));
+                return getResult();
+            }
+        }
 
-		public void setResult(DoubleWritable result) {
-			this.result = result;
-		}
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
 
-		public DoubleWritable getResult() {
-			return result;
-		}
-	}
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
 
 }
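
The javadoc above states the incremental and merge recurrences without a worked check. Below is a minimal standalone sketch (illustrative names, not part of the hivesterix sources) that folds a small stream of pairs both in one pass and as two merged halves; both paths produce the same n-times-covariance, which is what makes the PARTIAL/FINAL split above safe:

// Sketch checking the one-pass covariance recurrences documented above:
// the incremental update and the merge identity
//   c_X = c_A + c_B + (mx_A - mx_B) * (my_A - my_B) * n_A * n_B / n_X.
public class CovarSketch {
    long count;
    double xavg, yavg;
    double covar; // n times the covariance

    void iterate(double vx, double vy) {
        count++;
        yavg += (vy - yavg) / count;
        if (count > 1) {
            covar += (vx - xavg) * (vy - yavg); // old xavg, new yavg
        }
        xavg += (vx - xavg) / count;
    }

    void merge(CovarSketch other) {
        long nA = count, nB = other.count;
        if (nA == 0) {
            count = other.count; xavg = other.xavg; yavg = other.yavg; covar = other.covar;
        } else if (nB != 0) {
            double dx = xavg - other.xavg, dy = yavg - other.yavg;
            count += nB;
            xavg = (xavg * nA + other.xavg * nB) / count;
            yavg = (yavg * nA + other.yavg * nB) / count;
            covar += other.covar + dx * dy * ((double) (nA * nB) / count);
        }
    }

    public static void main(String[] args) {
        double[][] pairs = { { 1, 1 }, { 2, 3 }, { 4, 2 }, { 5, 7 } };
        CovarSketch whole = new CovarSketch();
        CovarSketch left = new CovarSketch(), right = new CovarSketch();
        for (int i = 0; i < pairs.length; i++) {
            whole.iterate(pairs[i][0], pairs[i][1]);
            (i < 2 ? left : right).iterate(pairs[i][0], pairs[i][1]);
        }
        left.merge(right);
        // Both paths agree (up to floating-point rounding): prints 2.75 twice.
        System.out.println(whole.covar / whole.count);
        System.out.println(left.covar / left.count);
    }
}
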
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
index 0323531..afdc397 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -41,254 +41,232 @@
 
 /**
  * GenericUDAFSum.
- * 
  */
 @Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers")
 public class GenericUDAFSum extends AbstractGenericUDAFResolver {
 
-	static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
+    static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		if (parameters.length != 1) {
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"Exactly one argument is expected.");
-		}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
 
-		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(0,
-					"Only primitive type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
-		case BYTE:
-		case SHORT:
-		case INT:
-		case LONG:
-			return new GenericUDAFSumLong();
-		case FLOAT:
-		case DOUBLE:
-		case STRING:
-			return new GenericUDAFSumDouble();
-		case BOOLEAN:
-		default:
-			throw new UDFArgumentTypeException(0,
-					"Only numeric or string type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-	}
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+                return new GenericUDAFSumLong();
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFSumDouble();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
 
-	/**
-	 * GenericUDAFSumDouble.
-	 * 
-	 */
-	public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
-		private PrimitiveObjectInspector inputOI;
-		private DoubleWritable result;
+    /**
+     * GenericUDAFSumDouble.
+     */
+    public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
+        private PrimitiveObjectInspector inputOI;
+        private DoubleWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			super.init(m, parameters);
-			result = new DoubleWritable(0);
-			inputOI = (PrimitiveObjectInspector) parameters[0];
-			return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
-		}
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+            result = new DoubleWritable(0);
+            inputOI = (PrimitiveObjectInspector) parameters[0];
+            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+        }
 
-		/** class for storing double sum value. */
-		static class SumDoubleAgg implements SerializableBuffer {
-			boolean empty;
-			double sum;
+        /** class for storing double sum value. */
+        static class SumDoubleAgg implements SerializableBuffer {
+            boolean empty;
+            double sum;
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				empty = BufferSerDeUtil.getBoolean(data, start);
-				start += 1;
-				sum = BufferSerDeUtil.getDouble(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                empty = BufferSerDeUtil.getBoolean(data, start);
+                start += 1;
+                sum = BufferSerDeUtil.getDouble(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeBoolean(empty, data, start);
-				start += 1;
-				BufferSerDeUtil.writeDouble(sum, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeBoolean(empty, data, start);
+                start += 1;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeBoolean(empty);
-				output.writeDouble(sum);
-			}
-		}
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeBoolean(empty);
+                output.writeDouble(sum);
+            }
+        }
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			SumDoubleAgg result = new SumDoubleAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            SumDoubleAgg result = new SumDoubleAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			SumDoubleAgg myagg = (SumDoubleAgg) agg;
-			myagg.empty = true;
-			myagg.sum = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            SumDoubleAgg myagg = (SumDoubleAgg) agg;
+            myagg.empty = true;
+            myagg.sum = 0;
+        }
 
-		boolean warned = false;
+        boolean warned = false;
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			try {
-				merge(agg, parameters[0]);
-			} catch (NumberFormatException e) {
-				if (!warned) {
-					warned = true;
-					LOG.warn(getClass().getSimpleName() + " "
-							+ StringUtils.stringifyException(e));
-					LOG.warn(getClass().getSimpleName()
-							+ " ignoring similar exceptions.");
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            try {
+                merge(agg, parameters[0]);
+            } catch (NumberFormatException e) {
+                if (!warned) {
+                    warned = true;
+                    LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                    LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                }
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			return terminate(agg);
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				SumDoubleAgg myagg = (SumDoubleAgg) agg;
-				myagg.empty = false;
-				myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial,
-						inputOI);
-			}
-		}
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                SumDoubleAgg myagg = (SumDoubleAgg) agg;
+                myagg.empty = false;
+                myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial, inputOI);
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			SumDoubleAgg myagg = (SumDoubleAgg) agg;
-			if (myagg.empty) {
-				return null;
-			}
-			result.set(myagg.sum);
-			return result;
-		}
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            SumDoubleAgg myagg = (SumDoubleAgg) agg;
+            if (myagg.empty) {
+                return null;
+            }
+            result.set(myagg.sum);
+            return result;
+        }
 
-	}
+    }
 
-	/**
-	 * GenericUDAFSumLong.
-	 * 
-	 */
-	public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
-		private PrimitiveObjectInspector inputOI;
-		private LongWritable result;
+    /**
+     * GenericUDAFSumLong.
+     */
+    public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
+        private PrimitiveObjectInspector inputOI;
+        private LongWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			super.init(m, parameters);
-			result = new LongWritable(0);
-			inputOI = (PrimitiveObjectInspector) parameters[0];
-			return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
-		}
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+            result = new LongWritable(0);
+            inputOI = (PrimitiveObjectInspector) parameters[0];
+            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+        }
 
-		/** class for storing double sum value. */
-		static class SumLongAgg implements SerializableBuffer {
-			boolean empty;
-			long sum;
+        /** class for storing long sum value. */
+        static class SumLongAgg implements SerializableBuffer {
+            boolean empty;
+            long sum;
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				empty = BufferSerDeUtil.getBoolean(data, start);
-				start += 1;
-				sum = BufferSerDeUtil.getLong(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                empty = BufferSerDeUtil.getBoolean(data, start);
+                start += 1;
+                sum = BufferSerDeUtil.getLong(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeBoolean(empty, data, start);
-				start += 1;
-				BufferSerDeUtil.writeLong(sum, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeBoolean(empty, data, start);
+                start += 1;
+                BufferSerDeUtil.writeLong(sum, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeBoolean(empty);
-				output.writeLong(sum);
-			}
-		}
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeBoolean(empty);
+                output.writeLong(sum);
+            }
+        }
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			SumLongAgg result = new SumLongAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            SumLongAgg result = new SumLongAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			SumLongAgg myagg = (SumLongAgg) agg;
-			myagg.empty = true;
-			myagg.sum = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            SumLongAgg myagg = (SumLongAgg) agg;
+            myagg.empty = true;
+            myagg.sum = 0;
+        }
 
-		private boolean warned = false;
+        private boolean warned = false;
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			try {
-				merge(agg, parameters[0]);
-			} catch (NumberFormatException e) {
-				if (!warned) {
-					warned = true;
-					LOG.warn(getClass().getSimpleName() + " "
-							+ StringUtils.stringifyException(e));
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            try {
+                merge(agg, parameters[0]);
+            } catch (NumberFormatException e) {
+                if (!warned) {
+                    warned = true;
+                    LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                }
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			return terminate(agg);
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				SumLongAgg myagg = (SumLongAgg) agg;
-				myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial,
-						inputOI);
-				myagg.empty = false;
-			}
-		}
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                SumLongAgg myagg = (SumLongAgg) agg;
+                myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputOI);
+                myagg.empty = false;
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			SumLongAgg myagg = (SumLongAgg) agg;
-			if (myagg.empty) {
-				return null;
-			}
-			result.set(myagg.sum);
-			return result;
-		}
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            SumLongAgg myagg = (SumLongAgg) agg;
+            if (myagg.empty) {
+                return null;
+            }
+            result.set(myagg.sum);
+            return result;
+        }
 
-	}
+    }
 
 }
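
The SerializableBuffer methods above read and write the aggregation state at fixed byte offsets: one byte for the empty flag at start, then the eight-byte sum at start + 1, which is why the byte[] variants advance start by 1 between the two calls. Below is a minimal, self-contained sketch of that layout. BufferSerDeUtil itself is not part of this diff, so the big-endian encoding here is an assumption; only the offset arithmetic mirrors the code above.

public final class SumLongAggLayoutSketch {
    static void writeBoolean(boolean b, byte[] data, int offset) {
        data[offset] = (byte) (b ? 1 : 0);
    }

    static void writeLong(long v, byte[] data, int offset) {
        for (int i = 0; i < 8; i++) {
            data[offset + i] = (byte) (v >>> (56 - 8 * i)); // most significant byte first (assumed)
        }
    }

    static boolean getBoolean(byte[] data, int offset) {
        return data[offset] != 0;
    }

    static long getLong(byte[] data, int offset) {
        long v = 0;
        for (int i = 0; i < 8; i++) {
            v = (v << 8) | (data[offset + i] & 0xFFL);
        }
        return v;
    }

    public static void main(String[] args) {
        byte[] buffer = new byte[9]; // 1 byte for empty + 8 bytes for sum
        int start = 0;
        writeBoolean(false, buffer, start);
        start += 1;                  // same offset bookkeeping as serializeAggBuffer above
        writeLong(12345L, buffer, start);
        System.out.println(getBoolean(buffer, 0) + " / " + getLong(buffer, 1)); // false / 12345
    }
}
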
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
index 4c16f5a..e839008 100644
--- a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
@@ -48,284 +48,258 @@
 /**
  * Compute the variance. This class is extended by: GenericUDAFVarianceSample
  * GenericUDAFStd GenericUDAFStdSample
- * 
  */
 @Description(name = "variance,var_pop", value = "_FUNC_(x) - Returns the variance of a set of numbers")
 public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
 
-	static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class
-			.getName());
+    static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName());
 
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException {
-		if (parameters.length != 1) {
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"Exactly one argument is expected.");
-		}
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
 
-		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-			throw new UDFArgumentTypeException(0,
-					"Only primitive type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
-		case BYTE:
-		case SHORT:
-		case INT:
-		case LONG:
-		case FLOAT:
-		case DOUBLE:
-		case STRING:
-			return new GenericUDAFVarianceEvaluator();
-		case BOOLEAN:
-		default:
-			throw new UDFArgumentTypeException(0,
-					"Only numeric or string type arguments are accepted but "
-							+ parameters[0].getTypeName() + " is passed.");
-		}
-	}
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFVarianceEvaluator();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
 
-	/**
-	 * Evaluate the variance using the algorithm described by Chan, Golub, and
-	 * LeVeque in
-	 * "Algorithms for computing the sample variance: analysis and recommendations"
-	 * The American Statistician, 37 (1983) pp. 242--247.
-	 * 
-	 * variance = variance1 + variance2 + n/(m*(m+n)) * pow(((m/n)*t1 - t2),2)
-	 * 
-	 * where: - variance is sum[x-avg^2] (this is actually n times the variance)
-	 * and is updated at every step. - n is the count of elements in chunk1 - m
-	 * is the count of elements in chunk2 - t1 = sum of elements in chunk1, t2 =
-	 * sum of elements in chunk2.
-	 * 
-	 * This algorithm was proven to be numerically stable by J.L. Barlow in
-	 * "Error analysis of a pairwise summation algorithm to compute sample variance"
-	 * Numer. Math, 58 (1991) pp. 583--590
-	 * 
-	 */
-	public static class GenericUDAFVarianceEvaluator extends
-			GenericUDAFEvaluator {
+    /**
+     * Evaluate the variance using the algorithm described by Chan, Golub, and
+     * LeVeque in
+     * "Algorithms for computing the sample variance: analysis and recommendations"
+     * The American Statistician, 37 (1983) pp. 242--247.
+     * variance = variance1 + variance2 + n/(m*(m+n)) * pow(((m/n)*t1 - t2),2)
+     * where: - variance is sum[x-avg^2] (this is actually n times the variance)
+     * and is updated at every step. - n is the count of elements in chunk1 - m
+     * is the count of elements in chunk2 - t1 = sum of elements in chunk1, t2 =
+     * sum of elements in chunk2.
+     * This algorithm was proven to be numerically stable by J.L. Barlow in
+     * "Error analysis of a pairwise summation algorithm to compute sample variance"
+     * Numer. Math, 58 (1991) pp. 583--590
+     */
+    public static class GenericUDAFVarianceEvaluator extends GenericUDAFEvaluator {
 
-		// For PARTIAL1 and COMPLETE
-		private PrimitiveObjectInspector inputOI;
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector inputOI;
 
-		// For PARTIAL2 and FINAL
-		private StructObjectInspector soi;
-		private StructField countField;
-		private StructField sumField;
-		private StructField varianceField;
-		private LongObjectInspector countFieldOI;
-		private DoubleObjectInspector sumFieldOI;
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField sumField;
+        private StructField varianceField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector sumFieldOI;
 
-		// For PARTIAL1 and PARTIAL2
-		private Object[] partialResult;
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
 
-		// For FINAL and COMPLETE
-		private DoubleWritable result;
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
 
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			super.init(m, parameters);
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
 
-			// init input
-			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
-				inputOI = (PrimitiveObjectInspector) parameters[0];
-			} else {
-				soi = (StructObjectInspector) parameters[0];
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                inputOI = (PrimitiveObjectInspector) parameters[0];
+            } else {
+                soi = (StructObjectInspector) parameters[0];
 
-				countField = soi.getStructFieldRef("count");
-				sumField = soi.getStructFieldRef("sum");
-				varianceField = soi.getStructFieldRef("variance");
+                countField = soi.getStructFieldRef("count");
+                sumField = soi.getStructFieldRef("sum");
+                varianceField = soi.getStructFieldRef("variance");
 
-				countFieldOI = (LongObjectInspector) countField
-						.getFieldObjectInspector();
-				sumFieldOI = (DoubleObjectInspector) sumField
-						.getFieldObjectInspector();
-			}
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
+            }
 
-			// init output
-			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
-				// The output of a partial aggregation is a struct containing
-				// a long count and doubles sum and variance.
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count and doubles sum and variance.
 
-				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
 
-				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
 
-				ArrayList<String> fname = new ArrayList<String>();
-				fname.add("count");
-				fname.add("sum");
-				fname.add("variance");
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("sum");
+                fname.add("variance");
 
-				partialResult = new Object[3];
-				partialResult[0] = new LongWritable(0);
-				partialResult[1] = new DoubleWritable(0);
-				partialResult[2] = new DoubleWritable(0);
+                partialResult = new Object[3];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
 
-				return ObjectInspectorFactory.getStandardStructObjectInspector(
-						fname, foi);
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
 
-			} else {
-				setResult(new DoubleWritable(0));
-				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
-			}
-		}
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
 
-		static class StdAgg implements SerializableBuffer {
-			long count; // number of elements
-			double sum; // sum of elements
-			double variance; // sum[x-avg^2] (this is actually n times the
-								// variance)
+        static class StdAgg implements SerializableBuffer {
+            long count; // number of elements
+            double sum; // sum of elements
+            double variance; // sum[x-avg^2] (this is actually n times the
+                             // variance)
 
-			@Override
-			public void deSerializeAggBuffer(byte[] data, int start, int len) {
-				count = BufferSerDeUtil.getLong(data, start);
-				start += 8;
-				sum = BufferSerDeUtil.getDouble(data, start);
-				start += 8;
-				variance = BufferSerDeUtil.getDouble(data, start);
-			}
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                sum = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                variance = BufferSerDeUtil.getDouble(data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(byte[] data, int start, int len) {
-				BufferSerDeUtil.writeLong(count, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(sum, data, start);
-				start += 8;
-				BufferSerDeUtil.writeDouble(variance, data, start);
-			}
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(variance, data, start);
+            }
 
-			@Override
-			public void serializeAggBuffer(DataOutput output)
-					throws IOException {
-				output.writeLong(count);
-				output.writeDouble(sum);
-				output.writeDouble(variance);
-			}
-		};
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(sum);
+                output.writeDouble(variance);
+            }
+        };
 
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-			StdAgg result = new StdAgg();
-			reset(result);
-			return result;
-		}
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
 
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			myagg.count = 0;
-			myagg.sum = 0;
-			myagg.variance = 0;
-		}
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.sum = 0;
+            myagg.variance = 0;
+        }
 
-		private boolean warned = false;
+        private boolean warned = false;
 
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException {
-			assert (parameters.length == 1);
-			Object p = parameters[0];
-			if (p != null) {
-				StdAgg myagg = (StdAgg) agg;
-				try {
-					double v = PrimitiveObjectInspectorUtils.getDouble(p,
-							inputOI);
-					myagg.count++;
-					myagg.sum += v;
-					if (myagg.count > 1) {
-						double t = myagg.count * v - myagg.sum;
-						myagg.variance += (t * t)
-								/ ((double) myagg.count * (myagg.count - 1));
-					}
-				} catch (NumberFormatException e) {
-					if (!warned) {
-						warned = true;
-						LOG.warn(getClass().getSimpleName() + " "
-								+ StringUtils.stringifyException(e));
-						LOG.warn(getClass().getSimpleName()
-								+ " ignoring similar exceptions.");
-					}
-				}
-			}
-		}
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            Object p = parameters[0];
+            if (p != null) {
+                StdAgg myagg = (StdAgg) agg;
+                try {
+                    double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
+                    myagg.count++;
+                    myagg.sum += v;
+                    if (myagg.count > 1) {
+                        double t = myagg.count * v - myagg.sum;
+                        myagg.variance += (t * t) / ((double) myagg.count * (myagg.count - 1));
+                    }
+                } catch (NumberFormatException e) {
+                    if (!warned) {
+                        warned = true;
+                        LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                        LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                    }
+                }
+            }
+        }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
-			((LongWritable) partialResult[0]).set(myagg.count);
-			((DoubleWritable) partialResult[1]).set(myagg.sum);
-			((DoubleWritable) partialResult[2]).set(myagg.variance);
-			return partialResult;
-		}
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.sum);
+            ((DoubleWritable) partialResult[2]).set(myagg.variance);
+            return partialResult;
+        }
 
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException {
-			if (partial != null) {
-				StdAgg myagg = (StdAgg) agg;
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
 
-				Object partialCount = soi.getStructFieldData(partial,
-						countField);
-				Object partialSum = soi.getStructFieldData(partial, sumField);
-				Object partialVariance = soi.getStructFieldData(partial,
-						varianceField);
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialSum = soi.getStructFieldData(partial, sumField);
+                Object partialVariance = soi.getStructFieldData(partial, varianceField);
 
-				long n = myagg.count;
-				long m = countFieldOI.get(partialCount);
+                long n = myagg.count;
+                long m = countFieldOI.get(partialCount);
 
-				if (n == 0) {
-					// Just copy the information since there is nothing so far
-					myagg.variance = sumFieldOI.get(partialVariance);
-					myagg.count = countFieldOI.get(partialCount);
-					myagg.sum = sumFieldOI.get(partialSum);
-				}
+                if (n == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.variance = sumFieldOI.get(partialVariance);
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.sum = sumFieldOI.get(partialSum);
+                }
 
-				if (m != 0 && n != 0) {
-					// Merge the two partials
+                if (m != 0 && n != 0) {
+                    // Merge the two partials
 
-					double a = myagg.sum;
-					double b = sumFieldOI.get(partialSum);
+                    double a = myagg.sum;
+                    double b = sumFieldOI.get(partialSum);
 
-					myagg.count += m;
-					myagg.sum += b;
-					double t = (m / (double) n) * a - b;
-					myagg.variance += sumFieldOI.get(partialVariance)
-							+ ((n / (double) m) / ((double) n + m)) * t * t;
-				}
-			}
-		}
+                    myagg.count += m;
+                    myagg.sum += b;
+                    double t = (m / (double) n) * a - b;
+                    myagg.variance += sumFieldOI.get(partialVariance) + ((n / (double) m) / ((double) n + m)) * t * t;
+                }
+            }
+        }
 
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException {
-			StdAgg myagg = (StdAgg) agg;
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
 
-			if (myagg.count == 0) { // SQL standard - return null for zero
-									// elements
-				return null;
-			} else {
-				if (myagg.count > 1) {
-					getResult().set(myagg.variance / (myagg.count));
-				} else { // for one element the variance is always 0
-					getResult().set(0);
-				}
-				return getResult();
-			}
-		}
+            if (myagg.count == 0) { // SQL standard - return null for zero
+                                    // elements
+                return null;
+            } else {
+                if (myagg.count > 1) {
+                    getResult().set(myagg.variance / (myagg.count));
+                } else { // for one element the variance is always 0
+                    getResult().set(0);
+                }
+                return getResult();
+            }
+        }
 
-		public void setResult(DoubleWritable result) {
-			this.result = result;
-		}
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
 
-		public DoubleWritable getResult() {
-			return result;
-		}
-	}
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
 
 }
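
The merge() above implements the Chan-Golub-LeVeque pairwise combination quoted in the class javadoc: the merged sum of squared deviations is S1 + S2 + (n/(m*(n+m))) * ((m/n)*t1 - t2)^2, where n and m are the chunk counts and t1 and t2 the chunk sums. The following standalone check (not part of hivesterix) confirms that the merge step reproduces a direct two-pass computation:

public final class VarianceMergeCheck {

    // Sum of squared deviations from the mean, computed directly in two passes.
    // This is the quantity the evaluator calls "variance" (n times the variance).
    static double sumSquaredDeviations(double[] xs) {
        double sum = 0;
        for (double x : xs) {
            sum += x;
        }
        double mean = sum / xs.length;
        double s = 0;
        for (double x : xs) {
            s += (x - mean) * (x - mean);
        }
        return s;
    }

    public static void main(String[] args) {
        double[] chunk1 = { 1, 2, 3 };
        double[] chunk2 = { 10, 20, 30, 40 };
        double[] all = { 1, 2, 3, 10, 20, 30, 40 };

        long n = chunk1.length;                   // count of chunk1
        long m = chunk2.length;                   // count of chunk2
        double t1 = 6, t2 = 100;                  // chunk sums
        double s1 = sumSquaredDeviations(chunk1); // 2.0
        double s2 = sumSquaredDeviations(chunk2); // 500.0

        // Pairwise merge, exactly as in merge() above:
        double t = (m / (double) n) * t1 - t2;
        double merged = s1 + s2 + ((n / (double) m) / ((double) n + m)) * t * t;

        // Both print ~1408.857142857143.
        System.out.println(merged + " vs " + sumSquaredDeviations(all));
    }
}

terminate() then divides this accumulator by count, i.e. it returns the population variance (var_pop), with null for zero rows and 0 for a single row, as the comments above note.
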
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
index a69a3f2..3a0f381 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
@@ -15,130 +15,128 @@
 import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
 
 public class PerfTestCase extends AbstractPerfTestCase {
-	private File resultFile;
-	private FileSystem dfs;
+    private File resultFile;
+    private FileSystem dfs;
 
-	PerfTestCase(File queryFile, File resultFile) {
-		super("testRuntimeFunction", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    PerfTestCase(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testRuntimeFunction() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
 
-		dfs = FileSystem.get(ConfUtil.getJobConf());
+        dfs = FileSystem.get(ConfUtil.getJobConf());
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			driver.run(query);
-			driver.clear();
-			i++;
-		}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
 
-		String warehouse = hconf.get("hive.metastore.warehouse.dir");
-		String tableName = removeExt(resultFile.getName());
-		String directory = warehouse + "/" + tableName + "/";
-		String localDirectory = "tmp";
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
 
-		FileStatus[] files = dfs.listStatus(new Path(directory));
-		FileSystem lfs = null;
-		if (files == null) {
-			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
-			files = lfs.listStatus(new Path(directory));
-		}
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
 
-		File resultDirectory = new File(localDirectory + "/" + tableName);
-		deleteDir(resultDirectory);
-		resultDirectory.mkdir();
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
 
-		for (FileStatus fs : files) {
-			Path src = fs.getPath();
-			if (src.getName().indexOf("crc") >= 0)
-				continue;
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
 
-			String destStr = localDirectory + "/" + tableName + "/"
-					+ src.getName();
-			Path dest = new Path(destStr);
-			if (lfs != null) {
-				lfs.copyToLocalFile(src, dest);
-				dfs.copyFromLocalFile(dest, new Path(directory));
-			} else
-				dfs.copyToLocalFile(src, dest);
-		}
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
 
-		File[] rFiles = resultDirectory.listFiles();
-		StringBuilder sb = new StringBuilder();
-		for (File r : rFiles) {
-			if (r.getName().indexOf("crc") >= 0)
-				continue;
-			readFileToString(r, sb);
-		}
-		deleteDir(resultDirectory);
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
 
-		StringBuilder buf = new StringBuilder();
-		readFileToString(resultFile, buf);
-		if (!equal(buf, sb)) {
-			throw new Exception("Result for " + queryFile + " changed:\n"
-					+ sw.toString());
-		}
-	}
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!equal(buf, sb)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
 
-	private void deleteDir(File resultDirectory) {
-		if (resultDirectory.exists()) {
-			File[] rFiles = resultDirectory.listFiles();
-			for (File r : rFiles)
-				r.delete();
-			resultDirectory.delete();
-		}
-	}
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
 
-	private boolean equal(StringBuilder sb1, StringBuilder sb2) {
-		String s1 = sb1.toString();
-		String s2 = sb2.toString();
-		String[] rowsOne = s1.split("\n");
-		String[] rowsTwo = s2.split("\n");
+    private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
 
-		if (rowsOne.length != rowsTwo.length)
-			return false;
+        if (rowsOne.length != rowsTwo.length)
+            return false;
 
-		for (int i = 0; i < rowsOne.length; i++) {
-			String row1 = rowsOne[i];
-			String row2 = rowsTwo[i];
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
 
-			if (row1.equals(row2))
-				continue;
+            if (row1.equals(row2))
+                continue;
 
-			String[] fields1 = row1.split("");
-			String[] fields2 = row2.split("");
+            String[] fields1 = row1.split("");
+            String[] fields2 = row2.split("");
 
-			for (int j = 0; j < fields1.length; j++) {
-				if (fields1[j].equals(fields2[j])) {
-					continue;
-				} else if (fields1[j].indexOf('.') < 0) {
-					return false;
-				} else {
-					Float float1 = Float.parseFloat(fields1[j]);
-					Float float2 = Float.parseFloat(fields2[j]);
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
 
-					if (Math.abs(float1 - float2) == 0)
-						continue;
-					else
-						return false;
-				}
-			}
-		}
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else
+                        return false;
+                }
+            }
+        }
 
-		return true;
-	}
+        return true;
+    }
 }
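
The equal() helper above compares result tables field by field: an exact string match passes; a mismatching field without a decimal point fails; otherwise both fields are parsed as Float and must agree by value, so formatting differences such as "1.0" versus "1.00" do not fail a test. A hypothetical extraction of that per-field rule, not part of the test suite:

public final class FieldMatchSketch {
    // Mirrors the per-field rule in equal() above.
    static boolean fieldsMatch(String f1, String f2) {
        if (f1.equals(f2)) {
            return true;                 // textual match
        }
        if (f1.indexOf('.') < 0) {
            return false;                // non-numeric fields must match exactly
        }
        float v1 = Float.parseFloat(f1); // numeric fields are compared by value,
        float v2 = Float.parseFloat(f2); // so "1.0" and "1.00" count as equal
        return Math.abs(v1 - v2) == 0;
    }

    public static void main(String[] args) {
        System.out.println(fieldsMatch("1.0", "1.00")); // true
        System.out.println(fieldsMatch("abc", "abd"));  // false
    }
}
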
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
index 8d8178f..796842d 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
@@ -9,66 +9,66 @@
 
 public class PerfTestSuite extends AbstractPerfTestSuiteClass {
 
-	private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
 
-	public static Test suite() throws Exception {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		PerfTestSuite testSuite = new PerfTestSuite();
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        PerfTestSuite testSuite = new PerfTestSuite();
 
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile()) {
-				String resultFileName = hiveExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new PerfTestCase(qFile, rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile()) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new PerfTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String hiveExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
-	/**
-	 * Runs the tests and collects their result in a TestResult.
-	 */
-	@Override
-	public void run(TestResult result) {
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
 
-		int testCount = countTestCases();
-		for (int i = 0; i < testCount; i++) {
-			Test each = this.testAt(i);
-			if (result.shouldStop())
-				break;
-			runTest(each, result);
-		}
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
 
-		// cleanup hdfs and hyracks cluster
-		try {
-			cleanup();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
-	}
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
 
 }
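
PerfTestSuite follows the classic JUnit 3 pattern of a static suite() factory combined with an overridden run(TestResult) that brackets the whole suite with one-time environment setup and teardown. A minimal sketch of the pattern, with illustrative names independent of the hivesterix classes:

import junit.framework.Test;
import junit.framework.TestResult;
import junit.framework.TestSuite;

// Expensive setup runs once before all cases and cleanup once after,
// even though each case is an ordinary TestCase.
public class BracketedSuite extends TestSuite {

    public static Test suite() {
        BracketedSuite suite = new BracketedSuite();
        // suite.addTest(new SomeTestCase(...)); // add cases as PerfTestSuite does
        return suite;
    }

    @Override
    public void run(TestResult result) {
        // setup();           // e.g. start mini HDFS and Hyracks, load data
        try {
            super.run(result); // runs every added test, honoring result.shouldStop()
        } finally {
            // cleanup();     // e.g. shut both clusters down
        }
    }
}

Unlike the code above, this sketch wraps the loop in try/finally, so cleanup runs even when a test throws; PerfTestSuite only reaches cleanup() when all tests complete.
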

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
index 258db22..75a325e 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
@@ -13,89 +13,87 @@
 import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
 
 public class PerfTestSuiteCaseGenerator extends AbstractPerfTestCase {
-	private File resultFile;
-	private FileSystem dfs;
+    private File resultFile;
+    private FileSystem dfs;
 
-	PerfTestSuiteCaseGenerator(File queryFile, File resultFile) {
-		super("testRuntimeFunction", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    PerfTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testRuntimeFunction() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf);
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf);
+        driver.init();
 
-		dfs = FileSystem.get(ConfUtil.getJobConf());
+        dfs = FileSystem.get(ConfUtil.getJobConf());
 
-		long startTime = System.currentTimeMillis();
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			driver.run(query);
-			// driver.clear();
-			i++;
-		}
-		long endTime = System.currentTimeMillis();
-		System.out.println(resultFile.getName() + " execution time "
-				+ (endTime - startTime));
+        long startTime = System.currentTimeMillis();
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            // driver.clear();
+            i++;
+        }
+        long endTime = System.currentTimeMillis();
+        System.out.println(resultFile.getName() + " execution time " + (endTime - startTime));
 
-		String warehouse = hconf.get("hive.metastore.warehouse.dir");
-		String tableName = removeExt(resultFile.getName());
-		String directory = warehouse + "/" + tableName + "/";
-		String localDirectory = "tmp";
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
 
-		FileStatus[] files = dfs.listStatus(new Path(directory));
-		FileSystem lfs = null;
-		if (files == null) {
-			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
-			files = lfs.listStatus(new Path(directory));
-		}
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
 
-		File resultDirectory = new File(localDirectory + "/" + tableName);
-		deleteDir(resultDirectory);
-		resultDirectory.mkdir();
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
 
-		for (FileStatus fs : files) {
-			Path src = fs.getPath();
-			if (src.getName().indexOf("crc") >= 0)
-				continue;
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
 
-			String destStr = localDirectory + "/" + tableName + "/"
-					+ src.getName();
-			Path dest = new Path(destStr);
-			if (lfs != null) {
-				lfs.copyToLocalFile(src, dest);
-				dfs.copyFromLocalFile(dest, new Path(directory));
-			} else
-				dfs.copyToLocalFile(src, dest);
-		}
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
 
-		File[] rFiles = resultDirectory.listFiles();
-		StringBuilder sb = new StringBuilder();
-		for (File r : rFiles) {
-			if (r.getName().indexOf("crc") >= 0)
-				continue;
-			readFileToString(r, sb);
-		}
-		deleteDir(resultDirectory);
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
 
-		writeStringToFile(resultFile, sb);
-	}
+        writeStringToFile(resultFile, sb);
+    }
 
-	private void deleteDir(File resultDirectory) {
-		if (resultDirectory.exists()) {
-			File[] rFiles = resultDirectory.listFiles();
-			for (File r : rFiles)
-				r.delete();
-			resultDirectory.delete();
-		}
-	}
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
index 0a27ca2..aa38014 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
@@ -9,66 +9,66 @@
 
 public class PerfTestSuiteGenerator extends AbstractPerfTestSuiteClass {
 
-	private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
 
-	public static Test suite() throws Exception {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		PerfTestSuiteGenerator testSuite = new PerfTestSuiteGenerator();
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        PerfTestSuiteGenerator testSuite = new PerfTestSuiteGenerator();
 
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile() && qFile.getName().startsWith("q18_")) {
-				String resultFileName = hiveExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new PerfTestSuiteCaseGenerator(qFile, rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile() && qFile.getName().startsWith("q18_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new PerfTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String hiveExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
-	/**
-	 * Runs the tests and collects their result in a TestResult.
-	 */
-	@Override
-	public void run(TestResult result) {
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
 
-		int testCount = countTestCases();
-		for (int i = 0; i < testCount; i++) {
-			Test each = this.testAt(i);
-			if (result.shouldStop())
-				break;
-			runTest(each, result);
-		}
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
 
-		// cleanup hdfs and hyracks cluster
-		try {
-			cleanup();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
-	}
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
 
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
index f55d6a1..7e7db36 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
@@ -10,43 +10,40 @@
 import junit.framework.TestCase;
 
 public class AbstractPerfTestCase extends TestCase {
-	protected File queryFile;
+    protected File queryFile;
 
-	public AbstractPerfTestCase(String testName, File queryFile) {
-		super(testName);
-	}
+    public AbstractPerfTestCase(String testName, File queryFile) {
+        super(testName);
+    }
 
-	protected static void readFileToString(File file, StringBuilder buf)
-			throws Exception {
-		BufferedReader result = new BufferedReader(new FileReader(file));
-		while (true) {
-			String s = result.readLine();
-			if (s == null) {
-				break;
-			} else {
-				buf.append(s);
-				buf.append('\n');
-			}
-		}
-		result.close();
-	}
+    protected static void readFileToString(File file, StringBuilder buf) throws Exception {
+        BufferedReader result = new BufferedReader(new FileReader(file));
+        while (true) {
+            String s = result.readLine();
+            if (s == null) {
+                break;
+            } else {
+                buf.append(s);
+                buf.append('\n');
+            }
+        }
+        result.close();
+    }
 
-	protected static void writeStringToFile(File file, StringWriter buf)
-			throws Exception {
-		PrintWriter result = new PrintWriter(new FileWriter(file));
-		result.print(buf);
-		result.close();
-	}
+    protected static void writeStringToFile(File file, StringWriter buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
 
-	protected static void writeStringToFile(File file, StringBuilder buf)
-			throws Exception {
-		PrintWriter result = new PrintWriter(new FileWriter(file));
-		result.print(buf);
-		result.close();
-	}
+    protected static void writeStringToFile(File file, StringBuilder buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
 
-	protected static String removeExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot);
-	}
+    protected static String removeExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot);
+    }
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
index 05474ca..08797d5 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
@@ -32,179 +32,175 @@
 @SuppressWarnings("deprecation")

 public abstract class AbstractPerfTestSuiteClass extends TestSuite {

 

-	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/perf/hadoop/conf";

-	private static final String PATH_TO_HIVE_CONF = "src/test/resources/perf/hive/conf/hive-default.xml";

-	private static final String PATH_TO_DATA = "src/test/resources/perf/data/";

+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/perf/hadoop/conf";

+    private static final String PATH_TO_HIVE_CONF = "src/test/resources/perf/hive/conf/hive-default.xml";

+    private static final String PATH_TO_DATA = "src/test/resources/perf/data/";

 

-	private MiniDFSCluster dfsCluster;

-	private MiniMRCluster mrCluster;

+    private MiniDFSCluster dfsCluster;

+    private MiniMRCluster mrCluster;

 

-	private JobConf conf = new JobConf();

-	protected FileSystem dfs;

+    private JobConf conf = new JobConf();

+    protected FileSystem dfs;

 

-	private int numberOfNC = 2;

-	private ClusterControllerService cc;

-	private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

+    private int numberOfNC = 2;

+    private ClusterControllerService cc;

+    private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

 

-	/**

-	 * setup cluster

-	 * 

-	 * @throws IOException

-	 */

-	protected void setup() throws Exception {

-		setupHdfs();

-		setupHyracks();

-	}

+    /**

+     * setup cluster

+     * 

+     * @throws IOException

+     */

+    protected void setup() throws Exception {

+        setupHdfs();

+        setupHyracks();

+    }

 

-	private void setupHdfs() throws IOException {

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

-		HiveConf hconf = new HiveConf(SessionState.class);

-		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+    private void setupHdfs() throws IOException {

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

+        HiveConf hconf = new HiveConf(SessionState.class);

+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));

 

-		FileSystem lfs = FileSystem.getLocal(new Configuration());

-		lfs.delete(new Path("build"), true);

-		lfs.delete(new Path("metastore_db"), true);

+        FileSystem lfs = FileSystem.getLocal(new Configuration());

+        lfs.delete(new Path("build"), true);

+        lfs.delete(new Path("metastore_db"), true);

 

-		System.setProperty("hadoop.log.dir", "logs");

-		dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

-		dfs = dfsCluster.getFileSystem();

+        System.setProperty("hadoop.log.dir", "logs");

+        dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

+        dfs = dfsCluster.getFileSystem();

 

-		mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

-		hconf.setVar(HiveConf.ConfVars.HADOOPJT,

-				"localhost:" + mrCluster.getJobTrackerPort());

-		hconf.setInt("mapred.min.split.size", 1342177280);

+        mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

+        hconf.setVar(HiveConf.ConfVars.HADOOPJT, "localhost:" + mrCluster.getJobTrackerPort());

+        hconf.setInt("mapred.min.split.size", 1342177280);

 

-		conf = new JobConf(hconf);

-		ConfUtil.setJobConf(conf);

+        conf = new JobConf(hconf);

+        ConfUtil.setJobConf(conf);

 

-		String fsName = conf.get("fs.default.name");

-		hconf.set("hive.metastore.warehouse.dir",

-				fsName.concat("/tmp/hivesterix"));

-		String warehouse = hconf.get("hive.metastore.warehouse.dir");

-		dfs.mkdirs(new Path(warehouse));

-		ConfUtil.setHiveConf(hconf);

-	}

+        String fsName = conf.get("fs.default.name");

+        hconf.set("hive.metastore.warehouse.dir", fsName.concat("/tmp/hivesterix"));

+        String warehouse = hconf.get("hive.metastore.warehouse.dir");

+        dfs.mkdirs(new Path(warehouse));

+        ConfUtil.setHiveConf(hconf);

+    }

 

-	private void setupHyracks() throws Exception {

-		// read hive conf

-		HiveConf hconf = new HiveConf(SessionState.class);

-		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

-		SessionState.start(hconf);

-		String ipAddress = hconf.get("hive.hyracks.host");

-		int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

-		int clusterPort = clientPort;

-		String applicationName = hconf.get("hive.hyracks.app");

+    private void setupHyracks() throws Exception {

+        // read hive conf

+        HiveConf hconf = new HiveConf(SessionState.class);

+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+        SessionState.start(hconf);

+        String ipAddress = hconf.get("hive.hyracks.host");

+        int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

+        int clusterPort = clientPort;

+        String applicationName = hconf.get("hive.hyracks.app");

 

-		// start hyracks cc

-		CCConfig ccConfig = new CCConfig();

-		ccConfig.clientNetIpAddress = ipAddress;

-		ccConfig.clientNetPort = clientPort;

-		ccConfig.clusterNetPort = clusterPort;

-		ccConfig.profileDumpPeriod = 1000;

-		ccConfig.heartbeatPeriod = 200000000;

-		ccConfig.maxHeartbeatLapsePeriods = 200000000;

-		cc = new ClusterControllerService(ccConfig);

-		cc.start();

+        // start hyracks cc

+        CCConfig ccConfig = new CCConfig();

+        ccConfig.clientNetIpAddress = ipAddress;

+        ccConfig.clientNetPort = clientPort;

+        ccConfig.clusterNetPort = clusterPort;

+        ccConfig.profileDumpPeriod = 1000;

+        ccConfig.heartbeatPeriod = 200000000;

+        ccConfig.maxHeartbeatLapsePeriods = 200000000;

+        cc = new ClusterControllerService(ccConfig);

+        cc.start();

 

-		// start hyracks nc

-		for (int i = 0; i < numberOfNC; i++) {

-			NCConfig ncConfig = new NCConfig();

-			ncConfig.ccHost = ipAddress;

-			ncConfig.clusterNetIPAddress = ipAddress;

-			ncConfig.ccPort = clientPort;

-			ncConfig.dataIPAddress = "127.0.0.1";

-			ncConfig.nodeId = "nc" + i;

-			NodeControllerService nc = new NodeControllerService(ncConfig);

-			nc.start();

-			ncs.put(ncConfig.nodeId, nc);

-		}

+        // start hyracks nc

+        for (int i = 0; i < numberOfNC; i++) {

+            NCConfig ncConfig = new NCConfig();

+            ncConfig.ccHost = ipAddress;

+            ncConfig.clusterNetIPAddress = ipAddress;

+            ncConfig.ccPort = clientPort;

+            ncConfig.dataIPAddress = "127.0.0.1";

+            ncConfig.nodeId = "nc" + i;

+            NodeControllerService nc = new NodeControllerService(ncConfig);

+            nc.start();

+            ncs.put(ncConfig.nodeId, nc);

+        }

 

-		IHyracksClientConnection hcc = new HyracksConnection(

-				ccConfig.clientNetIpAddress, clientPort);

-		hcc.createApplication(applicationName, null);

-	}

+        IHyracksClientConnection hcc = new HyracksConnection(ccConfig.clientNetIpAddress, clientPort);

+        hcc.createApplication(applicationName, null);

+    }

 

-	protected void makeDir(String path) throws IOException {

-		dfs.mkdirs(new Path(path));

-	}

+    protected void makeDir(String path) throws IOException {

+        dfs.mkdirs(new Path(path));

+    }

 

-	protected void loadFiles(String src, String dest) throws IOException {

-		dfs.copyFromLocalFile(new Path(src), new Path(dest));

-	}

+    protected void loadFiles(String src, String dest) throws IOException {

+        dfs.copyFromLocalFile(new Path(src), new Path(dest));

+    }

 

-	protected void cleanup() throws Exception {

-		cleanupHdfs();

-		cleanupHyracks();

-	}

+    protected void cleanup() throws Exception {

+        cleanupHdfs();

+        cleanupHyracks();

+    }

 

-	/**

-	 * cleanup hdfs cluster

-	 */

-	private void cleanupHdfs() throws IOException {

-		dfs.delete(new Path("/"), true);

-		FileSystem.closeAll();

-		dfsCluster.shutdown();

-	}

+    /**

+     * cleanup hdfs cluster

+     */

+    private void cleanupHdfs() throws IOException {

+        dfs.delete(new Path("/"), true);

+        FileSystem.closeAll();

+        dfsCluster.shutdown();

+    }

 

-	/**

-	 * cleanup hyracks cluster

-	 */

-	private void cleanupHyracks() throws Exception {

-		Iterator<NodeControllerService> iterator = ncs.values().iterator();

-		while (iterator.hasNext()) {

-			NodeControllerService nc = iterator.next();

-			nc.stop();

-		}

-		cc.stop();

-	}

+    /**

+     * cleanup hyracks cluster

+     */

+    private void cleanupHyracks() throws Exception {

+        Iterator<NodeControllerService> iterator = ncs.values().iterator();

+        while (iterator.hasNext()) {

+            NodeControllerService nc = iterator.next();

+            nc.stop();

+        }

+        cc.stop();

+    }

 

-	protected static List<String> getIgnoreList(String ignorePath)

-			throws FileNotFoundException, IOException {

-		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

-		String s = null;

-		List<String> ignores = new ArrayList<String>();

-		while ((s = reader.readLine()) != null) {

-			ignores.add(s);

-		}

-		reader.close();

-		return ignores;

-	}

+    protected static List<String> getIgnoreList(String ignorePath) throws FileNotFoundException, IOException {

+        BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

+        String s = null;

+        List<String> ignores = new ArrayList<String>();

+        while ((s = reader.readLine()) != null) {

+            ignores.add(s);

+        }

+        reader.close();

+        return ignores;

+    }

 

-	protected static boolean isIgnored(String q, List<String> ignoreList) {

-		for (String ignore : ignoreList) {

-			if (ignore.equals(q)) {

-				return true;

-			}

-		}

-		return false;

-	}

+    protected static boolean isIgnored(String q, List<String> ignoreList) {

+        for (String ignore : ignoreList) {

+            if (ignore.equals(q)) {

+                return true;

+            }

+        }

+        return false;

+    }

 

-	protected void loadData() throws IOException {

+    protected void loadData() throws IOException {

 

-		makeDir("/tpch");

-		makeDir("/tpch/customer");

-		makeDir("/tpch/lineitem");

-		makeDir("/tpch/orders");

-		makeDir("/tpch/part");

-		makeDir("/tpch/partsupp");

-		makeDir("/tpch/supplier");

-		makeDir("/tpch/nation");

-		makeDir("/tpch/region");

+        makeDir("/tpch");

+        makeDir("/tpch/customer");

+        makeDir("/tpch/lineitem");

+        makeDir("/tpch/orders");

+        makeDir("/tpch/part");

+        makeDir("/tpch/partsupp");

+        makeDir("/tpch/supplier");

+        makeDir("/tpch/nation");

+        makeDir("/tpch/region");

 

-		makeDir("/jarod");

+        makeDir("/jarod");

 

-		loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

-		loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

-		loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

-		loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

-		loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

-		loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

-		loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

-		loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

+        loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

+        loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

+        loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

+        loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

+        loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

+        loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

+        loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

+        loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

 

-		loadFiles(PATH_TO_DATA + "ext-gby.tbl", "/jarod/");

-	}

+        loadFiles(PATH_TO_DATA + "ext-gby.tbl", "/jarod/");

+    }

 

 }
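
The methods above complete the shared suite plumbing: create the HDFS directories, copy the local .tbl files in, and tear the clusters down afterwards. A minimal sketch of how a concrete suite drives that lifecycle, assuming the AbstractTestSuiteClass base shown later in this diff (the class name and the omitted test-registration step are illustrative, not part of this commit):

    import junit.framework.Test;
    import junit.framework.TestResult;

    // Hypothetical concrete suite wired to the setup()/loadData()/cleanup() contract above.
    public class ExampleQuerySuite extends AbstractTestSuiteClass {
        public static Test suite() throws Exception {
            ExampleQuerySuite suite = new ExampleQuerySuite();
            suite.setup();    // start mini HDFS plus the Hyracks CC and NCs
            suite.loadData(); // mkdir the /tpch paths and copy the .tbl files
            // addTest(...) calls for the individual query cases would go here
            return suite;
        }

        @Override
        public void run(TestResult result) {
            try {
                super.run(result);
            } finally {
                try {
                    cleanup(); // shut HDFS and Hyracks down whatever the outcome
                } catch (Exception e) {
                    throw new IllegalStateException(e.getMessage());
                }
            }
        }
    }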

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
index 560cef7..ae5fa05 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
@@ -10,43 +10,40 @@
 import junit.framework.TestCase;

 

 public class AbstractHivesterixTestCase extends TestCase {

-	protected File queryFile;

+    protected File queryFile;

 

-	public AbstractHivesterixTestCase(String testName, File queryFile) {

-		super(testName);

-	}

+    public AbstractHivesterixTestCase(String testName, File queryFile) {

+        super(testName);

+        this.queryFile = queryFile;

+    }

 

-	protected static void readFileToString(File file, StringBuilder buf)

-			throws Exception {

-		BufferedReader result = new BufferedReader(new FileReader(file));

-		while (true) {

-			String s = result.readLine();

-			if (s == null) {

-				break;

-			} else {

-				buf.append(s);

-				buf.append('\n');

-			}

-		}

-		result.close();

-	}

+    protected static void readFileToString(File file, StringBuilder buf) throws Exception {

+        BufferedReader result = new BufferedReader(new FileReader(file));

+        while (true) {

+            String s = result.readLine();

+            if (s == null) {

+                break;

+            } else {

+                buf.append(s);

+                buf.append('\n');

+            }

+        }

+        result.close();

+    }

 

-	protected static void writeStringToFile(File file, StringWriter buf)

-			throws Exception {

-		PrintWriter result = new PrintWriter(new FileWriter(file));

-		result.print(buf);

-		result.close();

-	}

+    protected static void writeStringToFile(File file, StringWriter buf) throws Exception {

+        PrintWriter result = new PrintWriter(new FileWriter(file));

+        result.print(buf);

+        result.close();

+    }

 

-	protected static void writeStringToFile(File file, StringBuilder buf)

-			throws Exception {

-		PrintWriter result = new PrintWriter(new FileWriter(file));

-		result.print(buf);

-		result.close();

-	}

+    protected static void writeStringToFile(File file, StringBuilder buf) throws Exception {

+        PrintWriter result = new PrintWriter(new FileWriter(file));

+        result.print(buf);

+        result.close();

+    }

 

-	protected static String removeExt(String fname) {

-		int dot = fname.lastIndexOf('.');

-		return fname.substring(0, dot);

-	}

+    protected static String removeExt(String fname) {

+        int dot = fname.lastIndexOf('.');

+        return fname.substring(0, dot);

+    }

 }
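
Note that readFileToString and writeStringToFile above close their streams only on the success path, so an exception mid-read leaks the file handle. A leak-safe variant of the reader, shown as a sketch rather than as code from this commit:

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;

    // Same contract as AbstractHivesterixTestCase.readFileToString, with the
    // close moved into finally so a failed read cannot leak the reader.
    protected static void readFileToString(File file, StringBuilder buf) throws Exception {
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            String s;
            while ((s = reader.readLine()) != null) {
                buf.append(s).append('\n');
            }
        } finally {
            reader.close();
        }
    }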

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
index e9a5736..4d3128b 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
@@ -33,185 +33,180 @@
 @SuppressWarnings("deprecation")

 public abstract class AbstractTestSuiteClass extends TestSuite {

 

-	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/runtimefunctionts/hadoop/conf";

-	private static final String PATH_TO_HIVE_CONF = "src/test/resources/runtimefunctionts/hive/conf/hive-default.xml";

+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/runtimefunctionts/hadoop/conf";

+    private static final String PATH_TO_HIVE_CONF = "src/test/resources/runtimefunctionts/hive/conf/hive-default.xml";

 

-	private static final String PATH_TO_CLUSTER_CONF = "src/test/resources/runtimefunctionts/hive/conf/topology.xml";

-	private static final String PATH_TO_DATA = "src/test/resources/runtimefunctionts/data/";

+    private static final String PATH_TO_CLUSTER_CONF = "src/test/resources/runtimefunctionts/hive/conf/topology.xml";

+    private static final String PATH_TO_DATA = "src/test/resources/runtimefunctionts/data/";

 

-	private MiniDFSCluster dfsCluster;

-	private MiniMRCluster mrCluster;

+    private MiniDFSCluster dfsCluster;

+    private MiniMRCluster mrCluster;

 

-	private JobConf conf = new JobConf();

-	protected FileSystem dfs;

+    private JobConf conf = new JobConf();

+    protected FileSystem dfs;

 

-	private int numberOfNC = 2;

-	private ClusterControllerService cc;

-	private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

+    private int numberOfNC = 2;

+    private ClusterControllerService cc;

+    private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

 

-	/**

-	 * setup cluster

-	 * 

-	 * @throws IOException

-	 */

-	protected void setup() throws Exception {

-		setupHdfs();

-		setupHyracks();

-	}

+    /**

+     * setup cluster

+     * 

+     * @throws IOException

+     */

+    protected void setup() throws Exception {

+        setupHdfs();

+        setupHyracks();

+    }

 

-	private void setupHdfs() throws IOException {

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

-		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

-		HiveConf hconf = new HiveConf(SessionState.class);

-		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+    private void setupHdfs() throws IOException {

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

+        HiveConf hconf = new HiveConf(SessionState.class);

+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));

 

-		FileSystem lfs = FileSystem.getLocal(new Configuration());

-		lfs.delete(new Path("build"), true);

-		lfs.delete(new Path("metastore_db"), true);

+        FileSystem lfs = FileSystem.getLocal(new Configuration());

+        lfs.delete(new Path("build"), true);

+        lfs.delete(new Path("metastore_db"), true);

 

-		System.setProperty("hadoop.log.dir", "logs");

-		dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

-		dfs = dfsCluster.getFileSystem();

+        System.setProperty("hadoop.log.dir", "logs");

+        dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

+        dfs = dfsCluster.getFileSystem();

 

-		mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

-		hconf.setVar(HiveConf.ConfVars.HADOOPJT,

-				"localhost:" + mrCluster.getJobTrackerPort());

+        mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

+        hconf.setVar(HiveConf.ConfVars.HADOOPJT, "localhost:" + mrCluster.getJobTrackerPort());

 

-		conf = new JobConf(hconf);

-		ConfUtil.setJobConf(conf);

+        conf = new JobConf(hconf);

+        ConfUtil.setJobConf(conf);

 

-		String fsName = conf.get("fs.default.name");

-		hconf.set("hive.metastore.warehouse.dir",

-				fsName.concat("/tmp/hivesterix"));

-		String warehouse = hconf.get("hive.metastore.warehouse.dir");

-		dfs.mkdirs(new Path(warehouse));

-		ConfUtil.setHiveConf(hconf);

-	}

+        String fsName = conf.get("fs.default.name");

+        hconf.set("hive.metastore.warehouse.dir", fsName.concat("/tmp/hivesterix"));

+        String warehouse = hconf.get("hive.metastore.warehouse.dir");

+        dfs.mkdirs(new Path(warehouse));

+        ConfUtil.setHiveConf(hconf);

+    }

 

-	private void setupHyracks() throws Exception {

-		// read hive conf

-		HiveConf hconf = new HiveConf(SessionState.class);

-		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

-		SessionState.start(hconf);

-		String ipAddress = hconf.get("hive.hyracks.host");

-		int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

-		int netPort = clientPort + 1;

-		String applicationName = hconf.get("hive.hyracks.app");

+    private void setupHyracks() throws Exception {

+        // read hive conf

+        HiveConf hconf = new HiveConf(SessionState.class);

+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+        SessionState.start(hconf);

+        String ipAddress = hconf.get("hive.hyracks.host");

+        int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

+        int netPort = clientPort + 1;

+        String applicationName = hconf.get("hive.hyracks.app");

 

-		// start hyracks cc

-		CCConfig ccConfig = new CCConfig();

-		ccConfig.clientNetIpAddress = ipAddress;

-		ccConfig.clientNetPort = clientPort;

-		ccConfig.clusterNetPort = netPort;

-		ccConfig.profileDumpPeriod = 1000;

-		ccConfig.heartbeatPeriod = 200000000;

-		ccConfig.maxHeartbeatLapsePeriods = 200000000;

-		ccConfig.clusterTopologyDefinition = new File(PATH_TO_CLUSTER_CONF);

-		cc = new ClusterControllerService(ccConfig);

-		cc.start();

+        // start hyracks cc

+        CCConfig ccConfig = new CCConfig();

+        ccConfig.clientNetIpAddress = ipAddress;

+        ccConfig.clientNetPort = clientPort;

+        ccConfig.clusterNetPort = netPort;

+        ccConfig.profileDumpPeriod = 1000;

+        ccConfig.heartbeatPeriod = 200000000;

+        ccConfig.maxHeartbeatLapsePeriods = 200000000;

+        ccConfig.clusterTopologyDefinition = new File(PATH_TO_CLUSTER_CONF);

+        cc = new ClusterControllerService(ccConfig);

+        cc.start();

 

-		// start hyracks nc

-		for (int i = 0; i < numberOfNC; i++) {

-			NCConfig ncConfig = new NCConfig();

-			ncConfig.ccHost = ipAddress;

-			ncConfig.clusterNetIPAddress = ipAddress;

-			ncConfig.ccPort = netPort;

-			ncConfig.dataIPAddress = "127.0.0.1";

-			ncConfig.nodeId = "nc" + i;

-			NodeControllerService nc = new NodeControllerService(ncConfig);

-			nc.start();

-			ncs.put(ncConfig.nodeId, nc);

-		}

+        // start hyracks nc

+        for (int i = 0; i < numberOfNC; i++) {

+            NCConfig ncConfig = new NCConfig();

+            ncConfig.ccHost = ipAddress;

+            ncConfig.clusterNetIPAddress = ipAddress;

+            ncConfig.ccPort = netPort;

+            ncConfig.dataIPAddress = "127.0.0.1";

+            ncConfig.nodeId = "nc" + i;

+            NodeControllerService nc = new NodeControllerService(ncConfig);

+            nc.start();

+            ncs.put(ncConfig.nodeId, nc);

+        }

 

-		IHyracksClientConnection hcc = new HyracksConnection(

-				ccConfig.clientNetIpAddress, clientPort);

-		hcc.createApplication(applicationName, null);

-	}

+        IHyracksClientConnection hcc = new HyracksConnection(ccConfig.clientNetIpAddress, clientPort);

+        hcc.createApplication(applicationName, null);

+    }

 

-	protected void makeDir(String path) throws IOException {

-		dfs.mkdirs(new Path(path));

-	}

+    protected void makeDir(String path) throws IOException {

+        dfs.mkdirs(new Path(path));

+    }

 

-	protected void loadFiles(String src, String dest) throws IOException {

-		dfs.copyFromLocalFile(new Path(src), new Path(dest));

-	}

+    protected void loadFiles(String src, String dest) throws IOException {

+        dfs.copyFromLocalFile(new Path(src), new Path(dest));

+    }

 

-	protected void cleanup() throws Exception {

-		cleanupHdfs();

-		cleanupHyracks();

-	}

+    protected void cleanup() throws Exception {

+        cleanupHdfs();

+        cleanupHyracks();

+    }

 

-	/**

-	 * cleanup hdfs cluster

-	 */

-	private void cleanupHdfs() throws IOException {

-		dfs.delete(new Path("/"), true);

-		FileSystem.closeAll();

-		dfsCluster.shutdown();

-	}

+    /**

+     * cleanup hdfs cluster

+     */

+    private void cleanupHdfs() throws IOException {

+        dfs.delete(new Path("/"), true);

+        FileSystem.closeAll();

+        dfsCluster.shutdown();

+    }

 

-	/**

-	 * cleanup hyracks cluster

-	 */

-	private void cleanupHyracks() throws Exception {

-		Iterator<NodeControllerService> iterator = ncs.values().iterator();

-		while (iterator.hasNext()) {

-			NodeControllerService nc = iterator.next();

-			nc.stop();

-		}

-		cc.stop();

-	}

+    /**

+     * cleanup hyracks cluster

+     */

+    private void cleanupHyracks() throws Exception {

+        Iterator<NodeControllerService> iterator = ncs.values().iterator();

+        while (iterator.hasNext()) {

+            NodeControllerService nc = iterator.next();

+            nc.stop();

+        }

+        cc.stop();

+    }

 

-	protected static List<String> getIgnoreList(String ignorePath)

-			throws FileNotFoundException, IOException {

-		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

-		String s = null;

-		List<String> ignores = new ArrayList<String>();

-		while ((s = reader.readLine()) != null) {

-			ignores.add(s);

-		}

-		reader.close();

-		return ignores;

-	}

+    protected static List<String> getIgnoreList(String ignorePath) throws FileNotFoundException, IOException {

+        BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

+        String s = null;

+        List<String> ignores = new ArrayList<String>();

+        while ((s = reader.readLine()) != null) {

+            ignores.add(s);

+        }

+        reader.close();

+        return ignores;

+    }

 

-	protected static boolean isIgnored(String q, List<String> ignoreList) {

-		for (String ignore : ignoreList) {

-			if (q.indexOf(ignore) >= 0) {

-				return true;

-			}

-		}

-		return false;

-	}

+    protected static boolean isIgnored(String q, List<String> ignoreList) {

+        for (String ignore : ignoreList) {

+            if (q.indexOf(ignore) >= 0) {

+                return true;

+            }

+        }

+        return false;

+    }

 

-	protected void loadData() throws IOException {

+    protected void loadData() throws IOException {

 

-		makeDir("/tpch");

-		makeDir("/tpch/customer");

-		makeDir("/tpch/lineitem");

-		makeDir("/tpch/orders");

-		makeDir("/tpch/part");

-		makeDir("/tpch/partsupp");

-		makeDir("/tpch/supplier");

-		makeDir("/tpch/nation");

-		makeDir("/tpch/region");

+        makeDir("/tpch");

+        makeDir("/tpch/customer");

+        makeDir("/tpch/lineitem");

+        makeDir("/tpch/orders");

+        makeDir("/tpch/part");

+        makeDir("/tpch/partsupp");

+        makeDir("/tpch/supplier");

+        makeDir("/tpch/nation");

+        makeDir("/tpch/region");

 

-		makeDir("/test");

-		makeDir("/test/joinsrc1");

-		makeDir("/test/joinsrc2");

+        makeDir("/test");

+        makeDir("/test/joinsrc1");

+        makeDir("/test/joinsrc2");

 

-		loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

-		loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

-		loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

-		loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

-		loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

-		loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

-		loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

-		loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

+        loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

+        loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

+        loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

+        loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

+        loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

+        loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

+        loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

+        loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

 

-		loadFiles(PATH_TO_DATA + "large_card_join_src.tbl", "/test/joinsrc1/");

-		loadFiles(PATH_TO_DATA + "large_card_join_src_small.tbl",

-				"/test/joinsrc2/");

-	}

+        loadFiles(PATH_TO_DATA + "large_card_join_src.tbl", "/test/joinsrc1/");

+        loadFiles(PATH_TO_DATA + "large_card_join_src_small.tbl", "/test/joinsrc2/");

+    }

 

 }
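
setupHdfs above relies on Hadoop's embedded test cluster. A self-contained sketch of that pattern, using the same deprecated MiniDFSCluster constructor the suite calls (the node count and paths here are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniDfsSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // two data nodes, format the name node on start, default rack topology
            MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
            try {
                FileSystem dfs = cluster.getFileSystem();
                dfs.mkdirs(new Path("/tpch/customer"));
                dfs.copyFromLocalFile(new Path("customer.tbl"), new Path("/tpch/customer/"));
            } finally {
                cluster.shutdown();
            }
        }
    }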

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
index 800d6be..ac029b1 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
@@ -21,62 +21,55 @@
 @SuppressWarnings("deprecation")
 public class RecordBalance {
 
-	private static String confPath = System.getenv("HADDOP_HOME");
-	private static Path[] inputPaths = { new Path("/tpch/100x/customer"),
-			new Path("/tpch/100x/nation"), new Path("/tpch/100x/region"),
-			new Path("/tpch/100x/lineitem"), new Path("/tpch/100x/orders"),
-			new Path("/tpch/100x/part"), new Path("/tpch/100x/partsupp"),
-			new Path("/tpch/100x/supplier") };
+    private static String confPath = System.getenv("HADOOP_HOME");
+    private static Path[] inputPaths = { new Path("/tpch/100x/customer"), new Path("/tpch/100x/nation"),
+            new Path("/tpch/100x/region"), new Path("/tpch/100x/lineitem"), new Path("/tpch/100x/orders"),
+            new Path("/tpch/100x/part"), new Path("/tpch/100x/partsupp"), new Path("/tpch/100x/supplier") };
 
-	private static Path[] outputPaths = { new Path("/tpch/100/customer"),
-			new Path("/tpch/100/nation"), new Path("/tpch/100/region"),
-			new Path("/tpch/100/lineitem"), new Path("/tpch/100/orders"),
-			new Path("/tpch/100/part"), new Path("/tpch/100/partsupp"),
-			new Path("/tpch/100/supplier") };
+    private static Path[] outputPaths = { new Path("/tpch/100/customer"), new Path("/tpch/100/nation"),
+            new Path("/tpch/100/region"), new Path("/tpch/100/lineitem"), new Path("/tpch/100/orders"),
+            new Path("/tpch/100/part"), new Path("/tpch/100/partsupp"), new Path("/tpch/100/supplier") };
 
-	public static class MapRecordOnly extends MapReduceBase implements
-			Mapper<LongWritable, Text, LongWritable, Text> {
+    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
 
-		public void map(LongWritable id, Text inputValue,
-				OutputCollector<LongWritable, Text> output, Reporter reporter)
-				throws IOException {
-			output.collect(id, inputValue);
-		}
-	}
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            output.collect(id, inputValue);
+        }
+    }
 
-	public static class ReduceRecordOnly extends MapReduceBase implements
-			Reducer<LongWritable, Text, NullWritable, Text> {
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
 
-		NullWritable key = NullWritable.get();
+        NullWritable key = NullWritable.get();
 
-		public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
-				OutputCollector<NullWritable, Text> output, Reporter reporter)
-				throws IOException {
-			while (inputValue.hasNext())
-				output.collect(key, inputValue.next());
-		}
-	}
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
 
-	public static void main(String[] args) throws IOException {
+    public static void main(String[] args) throws IOException {
 
-		for (int i = 0; i < inputPaths.length; i++) {
-			JobConf job = new JobConf(RecordBalance.class);
-			job.addResource(new Path(confPath + "/core-site.xml"));
-			job.addResource(new Path(confPath + "/mapred-site.xml"));
-			job.addResource(new Path(confPath + "/hdfs-site.xml"));
+        for (int i = 0; i < inputPaths.length; i++) {
+            JobConf job = new JobConf(RecordBalance.class);
+            job.addResource(new Path(confPath + "/core-site.xml"));
+            job.addResource(new Path(confPath + "/mapred-site.xml"));
+            job.addResource(new Path(confPath + "/hdfs-site.xml"));
 
-			job.setJobName(RecordBalance.class.getSimpleName());
-			job.setMapperClass(MapRecordOnly.class);
-			job.setReducerClass(ReduceRecordOnly.class);
-			job.setMapOutputKeyClass(LongWritable.class);
-			job.setMapOutputValueClass(Text.class);
+            job.setJobName(RecordBalance.class.getSimpleName());
+            job.setMapperClass(MapRecordOnly.class);
+            job.setReducerClass(ReduceRecordOnly.class);
+            job.setMapOutputKeyClass(LongWritable.class);
+            job.setMapOutputValueClass(Text.class);
 
-			job.setInputFormat(TextInputFormat.class);
-			FileInputFormat.setInputPaths(job, inputPaths[i]);
-			FileOutputFormat.setOutputPath(job, outputPaths[i]);
-			job.setNumReduceTasks(Integer.parseInt(args[0]));
+            job.setInputFormat(TextInputFormat.class);
+            FileInputFormat.setInputPaths(job, inputPaths[i]);
+            FileOutputFormat.setOutputPath(job, outputPaths[i]);
+            job.setNumReduceTasks(Integer.parseInt(args[0]));
 
-			JobClient.runJob(job);
-		}
-	}
+            JobClient.runJob(job);
+        }
+    }
 }
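
RecordBalance is an identity map/reduce pass: every record is re-emitted unchanged, so its only effect is that each table ends up re-split across args[0] reduce output files (from /tpch/100x/* into /tpch/100/*). A usage sketch; the jar name is invented, and HADOOP_HOME must point at the directory holding the *-site.xml files, since the job loads them from that path:

    # repartition each /tpch/100x table into 8 output files under /tpch/100
    export HADOOP_HOME=/opt/hadoop/conf
    hadoop jar hivesterix-tests.jar edu.uci.ics.hivesterix.test.datagen.RecordBalance 8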
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
index 9591c32..a286747 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
@@ -15,130 +15,128 @@
 import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
 
 public class LegacyTestCase extends AbstractHivesterixTestCase {
-	private File resultFile;
-	private FileSystem dfs;
+    private File resultFile;
+    private FileSystem dfs;
 
-	public LegacyTestCase(File queryFile, File resultFile) {
-		super("legacy", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    public LegacyTestCase(File queryFile, File resultFile) {
+        super("legacy", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testRuntimeFunction() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
 
-		dfs = FileSystem.get(ConfUtil.getJobConf());
+        dfs = FileSystem.get(ConfUtil.getJobConf());
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			driver.run(query);
-			driver.clear();
-			i++;
-		}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
 
-		String warehouse = hconf.get("hive.metastore.warehouse.dir");
-		String tableName = removeExt(resultFile.getName());
-		String directory = warehouse + "/" + tableName + "/";
-		String localDirectory = "tmp";
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
 
-		FileStatus[] files = dfs.listStatus(new Path(directory));
-		FileSystem lfs = null;
-		if (files == null) {
-			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
-			files = lfs.listStatus(new Path(directory));
-		}
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
 
-		File resultDirectory = new File(localDirectory + "/" + tableName);
-		deleteDir(resultDirectory);
-		resultDirectory.mkdir();
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
 
-		for (FileStatus fs : files) {
-			Path src = fs.getPath();
-			if (src.getName().indexOf("crc") >= 0)
-				continue;
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
 
-			String destStr = localDirectory + "/" + tableName + "/"
-					+ src.getName();
-			Path dest = new Path(destStr);
-			if (lfs != null) {
-				lfs.copyToLocalFile(src, dest);
-				dfs.copyFromLocalFile(dest, new Path(directory));
-			} else
-				dfs.copyToLocalFile(src, dest);
-		}
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
 
-		File[] rFiles = resultDirectory.listFiles();
-		StringBuilder sb = new StringBuilder();
-		for (File r : rFiles) {
-			if (r.getName().indexOf("crc") >= 0)
-				continue;
-			readFileToString(r, sb);
-		}
-		deleteDir(resultDirectory);
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
 
-		StringBuilder buf = new StringBuilder();
-		readFileToString(resultFile, buf);
-		if (!equal(buf, sb)) {
-			throw new Exception("Result for " + queryFile + " changed:\n"
-					+ sw.toString());
-		}
-	}
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!equal(buf, sb)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
 
-	private void deleteDir(File resultDirectory) {
-		if (resultDirectory.exists()) {
-			File[] rFiles = resultDirectory.listFiles();
-			for (File r : rFiles)
-				r.delete();
-			resultDirectory.delete();
-		}
-	}
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
 
-	private boolean equal(StringBuilder sb1, StringBuilder sb2) {
-		String s1 = sb1.toString();
-		String s2 = sb2.toString();
-		String[] rowsOne = s1.split("\n");
-		String[] rowsTwo = s2.split("\n");
+    private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
 
-		if (rowsOne.length != rowsTwo.length)
-			return false;
+        if (rowsOne.length != rowsTwo.length)
+            return false;
 
-		for (int i = 0; i < rowsOne.length; i++) {
-			String row1 = rowsOne[i];
-			String row2 = rowsTwo[i];
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
 
-			if (row1.equals(row2))
-				continue;
+            if (row1.equals(row2))
+                continue;
 
-			String[] fields1 = row1.split("");
-			String[] fields2 = row2.split("");
+            String[] fields1 = row1.split("");
+            String[] fields2 = row2.split("");
 
-			for (int j = 0; j < fields1.length; j++) {
-				if (fields1[j].equals(fields2[j])) {
-					continue;
-				} else if (fields1[j].indexOf('.') < 0) {
-					return false;
-				} else {
-					Float float1 = Float.parseFloat(fields1[j]);
-					Float float2 = Float.parseFloat(fields2[j]);
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
 
-					if (Math.abs(float1 - float2) == 0)
-						continue;
-					else
-						return false;
-				}
-			}
-		}
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else
+                        return false;
+                }
+            }
+        }
 
-		return true;
-	}
+        return true;
+    }
 }
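
The equal() helper above falls back to parsing fields as floats, but its tolerance test, Math.abs(float1 - float2) == 0, accepts only exact float equality, so the fallback never actually loosens the comparison. An epsilon-based variant, sketched as an alternative rather than as project code (the EPSILON value is illustrative):

    // Compare two numeric fields with a small absolute tolerance.
    private static final float EPSILON = 1e-4f;

    private static boolean floatsClose(String a, String b) {
        try {
            return Math.abs(Float.parseFloat(a) - Float.parseFloat(b)) <= EPSILON;
        } catch (NumberFormatException e) {
            return false; // non-numeric fields keep the exact string comparison
        }
    }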
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
index db13676..f80405f 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
@@ -12,46 +12,42 @@
 import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
 
 public class OptimizerTestCase extends AbstractHivesterixTestCase {
-	private File resultFile;
+    private File resultFile;
 
-	OptimizerTestCase(File queryFile, File resultFile) {
-		super("testOptimizer", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    OptimizerTestCase(File queryFile, File resultFile) {
+        super("testOptimizer", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testOptimizer() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testOptimizer() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			if (query.toLowerCase().indexOf("create") >= 0
-					|| query.toLowerCase().indexOf("drop") >= 0
-					|| query.toLowerCase().indexOf("set") >= 0
-					|| query.toLowerCase().startsWith("\n\ncreate")
-					|| query.toLowerCase().startsWith("\n\ndrop")
-					|| query.toLowerCase().startsWith("\n\nset"))
-				driver.run(query);
-			else
-				driver.compile(query);
-			driver.clear();
-			i++;
-		}
-		StringBuilder buf = new StringBuilder();
-		readFileToString(resultFile, buf);
-		if (!buf.toString().equals(sw.toString())) {
-			throw new Exception("Result for " + queryFile + " changed:\n"
-					+ sw.toString());
-		}
-	}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            if (query.toLowerCase().indexOf("create") >= 0 || query.toLowerCase().indexOf("drop") >= 0
+                    || query.toLowerCase().indexOf("set") >= 0 || query.toLowerCase().startsWith("\n\ncreate")
+                    || query.toLowerCase().startsWith("\n\ndrop") || query.toLowerCase().startsWith("\n\nset"))
+                driver.run(query);
+            else
+                driver.compile(query);
+            driver.clear();
+            i++;
+        }
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!buf.toString().equals(sw.toString())) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
 }
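
The routing condition in testOptimizer is a six-way disjunction, but each startsWith("\n\n...") check is already implied by the corresponding indexOf check, so the whole test collapses to three substring matches. A condensed equivalent, sketched for clarity:

    // True for statements the test must actually execute (DDL and "set");
    // everything else is only compiled so that its plan reaches the log writer.
    private static boolean mustExecute(String query) {
        String q = query.toLowerCase();
        return q.contains("create") || q.contains("drop") || q.contains("set");
    }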
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
index 217f67d..c6b788f 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
@@ -11,67 +11,65 @@
 import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
 
 public class OptimizerTestSuitGenerator extends AbstractTestSuiteClass {
-	private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+    private static final String FILE_EXTENSION_OF_RESULTS = "plan";
 
-	public static Test suite() throws UnsupportedEncodingException,
-			FileNotFoundException, IOException {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		OptimizerTestSuitGenerator testSuite = new OptimizerTestSuitGenerator();
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+    public static Test suite() throws UnsupportedEncodingException, FileNotFoundException, IOException {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        OptimizerTestSuitGenerator testSuite = new OptimizerTestSuitGenerator();
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile()) {
-				String resultFileName = aqlExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new OptimizerTestSuiteCaseGenerator(qFile,
-						rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile()) {
+                String resultFileName = aqlExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new OptimizerTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String aqlExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String aqlExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
-	/**
-	 * Runs the tests and collects their result in a TestResult.
-	 */
-	@Override
-	public void run(TestResult result) {
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
 
-		int testCount = countTestCases();
-		for (int i = 0; i < testCount; i++) {
-			Test each = this.testAt(i);
-			if (result.shouldStop())
-				break;
-			runTest(each, result);
-		}
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
 
-		// cleanup hdfs and hyracks cluster
-		try {
-			cleanup();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
-	}
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
 
 }
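
The overridden run above re-implements JUnit 3's suite loop so that cleanup() executes after the last case. A try/finally sketch that keeps the stock loop instead (behaviorally equivalent, since TestSuite.run already honors result.shouldStop()):

    @Override
    public void run(TestResult result) {
        try {
            super.run(result); // stock JUnit 3 iteration over the added tests
        } finally {
            try {
                cleanup(); // tear down HDFS and Hyracks even if a case failed
            } catch (Exception e) {
                throw new IllegalStateException(e.getMessage());
            }
        }
    }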
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
index e3a4a4e..8ac4e86 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
@@ -11,44 +11,43 @@
 
 public class OptimizerTestSuite extends AbstractTestSuiteClass {
 
-	private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+    private static final String FILE_EXTENSION_OF_RESULTS = "plan";
 
-	public static Test suite() throws UnsupportedEncodingException,
-			FileNotFoundException, IOException {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		OptimizerTestSuite testSuite = new OptimizerTestSuite();
+    public static Test suite() throws UnsupportedEncodingException, FileNotFoundException, IOException {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        OptimizerTestSuite testSuite = new OptimizerTestSuite();
 
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile() && qFile.getName().startsWith("h11_")) {
-				String resultFileName = hiveExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new OptimizerTestCase(qFile, rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile() && qFile.getName().startsWith("h11_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new OptimizerTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String hiveExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
index a86dc29..b71ecf8 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
@@ -12,42 +12,39 @@
 import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
 
 public class OptimizerTestSuiteCaseGenerator extends AbstractHivesterixTestCase {
-	private File resultFile;
+    private File resultFile;
 
-	OptimizerTestSuiteCaseGenerator(File queryFile, File resultFile) {
-		super("testOptimizer", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    OptimizerTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testOptimizer", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testOptimizer() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testOptimizer() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			if (query.toLowerCase().indexOf("create") >= 0
-					|| query.toLowerCase().indexOf("drop") >= 0
-					|| query.toLowerCase().indexOf("set") >= 0
-					|| query.toLowerCase().startsWith("\n\ncreate")
-					|| query.toLowerCase().startsWith("\n\ndrop")
-					|| query.toLowerCase().startsWith("\n\nset"))
-				driver.run(query);
-			else
-				driver.compile(query);
-			driver.clear();
-			i++;
-		}
-		sw.close();
-		writeStringToFile(resultFile, sw);
-	}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            if (query.toLowerCase().indexOf("create") >= 0 || query.toLowerCase().indexOf("drop") >= 0
+                    || query.toLowerCase().indexOf("set") >= 0 || query.toLowerCase().startsWith("\n\ncreate")
+                    || query.toLowerCase().startsWith("\n\ndrop") || query.toLowerCase().startsWith("\n\nset"))
+                driver.run(query);
+            else
+                driver.compile(query);
+            driver.clear();
+            i++;
+        }
+        sw.close();
+        writeStringToFile(resultFile, sw);
+    }
 }
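
OptimizerTestSuiteCaseGenerator is the write side of the golden-file pair: it compiles each query and overwrites the corresponding .plan file instead of asserting against it. A sketch of a one-off regeneration runner (the runner class name is invented; suite() and TestResult are as above):

    import junit.framework.TestResult;

    public class RegeneratePlans {
        public static void main(String[] args) throws Exception {
            // Rewrites src/test/resources/optimizerts/results/*.plan via the
            // generator cases, then run(...) tears the clusters down.
            OptimizerTestSuitGenerator.suite().run(new TestResult());
        }
    }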
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
index 078de9a..60a8b13 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
@@ -15,138 +15,133 @@
 import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
 
 public class RuntimeFunctionTestCase extends AbstractHivesterixTestCase {
-	private File resultFile;
-	private FileSystem dfs;
+    private File resultFile;
+    private FileSystem dfs;
 
-	RuntimeFunctionTestCase(File queryFile, File resultFile) {
-		super("testRuntimeFunction", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    RuntimeFunctionTestCase(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testRuntimeFunction() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
-		// Driver driver = new Driver(hconf);
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+        // Driver driver = new Driver(hconf);
 
-		dfs = FileSystem.get(ConfUtil.getJobConf());
+        dfs = FileSystem.get(ConfUtil.getJobConf());
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			driver.run(query);
-			driver.clear();
-			i++;
-		}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
 
-		String warehouse = hconf.get("hive.metastore.warehouse.dir");
-		String tableName = removeExt(resultFile.getName());
-		String directory = warehouse + "/" + tableName + "/";
-		String localDirectory = "tmp";
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
 
-		FileStatus[] files = dfs.listStatus(new Path(directory));
-		FileSystem lfs = null;
-		if (files == null) {
-			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
-			files = lfs.listStatus(new Path(directory));
-		}
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
 
-		File resultDirectory = new File(localDirectory + "/" + tableName);
-		deleteDir(resultDirectory);
-		resultDirectory.mkdir();
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
 
-		for (FileStatus fs : files) {
-			Path src = fs.getPath();
-			if (src.getName().indexOf("crc") >= 0)
-				continue;
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
 
-			String destStr = localDirectory + "/" + tableName + "/"
-					+ src.getName();
-			Path dest = new Path(destStr);
-			if (lfs != null) {
-				lfs.copyToLocalFile(src, dest);
-				dfs.copyFromLocalFile(dest, new Path(directory));
-			} else
-				dfs.copyToLocalFile(src, dest);
-		}
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
 
-		File[] rFiles = resultDirectory.listFiles();
-		StringBuilder sb = new StringBuilder();
-		for (File r : rFiles) {
-			if (r.getName().indexOf("crc") >= 0)
-				continue;
-			readFileToString(r, sb);
-		}
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
 
-		StringBuilder buf = new StringBuilder();
-		readFileToString(resultFile, buf);
-		StringBuffer errorMsg = new StringBuffer();
-		if (!equal(buf, sb, errorMsg)) {
-			throw new Exception("Result for " + queryFile + " changed:\n"
-					+ errorMsg.toString());
-		}
-		deleteDir(resultDirectory);
-	}
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        StringBuffer errorMsg = new StringBuffer();
+        if (!equal(buf, sb, errorMsg)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + errorMsg.toString());
+        }
+        deleteDir(resultDirectory);
+    }
 
-	private void deleteDir(File resultDirectory) {
-		if (resultDirectory.exists()) {
-			File[] rFiles = resultDirectory.listFiles();
-			for (File r : rFiles)
-				r.delete();
-			resultDirectory.delete();
-		}
-	}
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
 
-	private boolean equal(StringBuilder sb1, StringBuilder sb2,
-			StringBuffer errorMsg) {
-		String s1 = sb1.toString();
-		String s2 = sb2.toString();
-		String[] rowsOne = s1.split("\n");
-		String[] rowsTwo = s2.split("\n");
+    private boolean equal(StringBuilder sb1, StringBuilder sb2, StringBuffer errorMsg) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
 
-		if (rowsOne.length != rowsTwo.length)
-			return false;
+        if (rowsOne.length != rowsTwo.length)
+            return false;
 
-		for (int i = 0; i < rowsOne.length; i++) {
-			String row1 = rowsOne[i];
-			String row2 = rowsTwo[i];
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
 
-			if (row1.equals(row2))
-				continue;
+            if (row1.equals(row2))
+                continue;
 
-			String[] fields1 = row1.split("");
-			String[] fields2 = row2.split("");
+            String[] fields1 = row1.split("");
+            String[] fields2 = row2.split("");
 
-			for (int j = 0; j < fields1.length; j++) {
-				if (fields1[j].equals(fields2[j])) {
-					continue;
-				} else if (fields1[j].indexOf('.') < 0) {
-					errorMsg.append("line " + i + " column " + j + ": "
-							+ fields2[j] + " expected " + fields1[j]);
-					return false;
-				} else {
-					Float float1 = Float.parseFloat(fields1[j]);
-					Float float2 = Float.parseFloat(fields2[j]);
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    errorMsg.append("line " + i + " column " + j + ": " + fields2[j] + " expected " + fields1[j]);
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
 
-					if (Math.abs(float1 - float2) == 0)
-						continue;
-					else {
-						errorMsg.append("line " + i + " column " + j + ": "
-								+ fields2[j] + " expected " + fields1[j]);
-						return false;
-					}
-				}
-			}
-		}
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else {
+                        errorMsg.append("line " + i + " column " + j + ": " + fields2[j] + " expected " + fields1[j]);
+                        return false;
+                    }
+                }
+            }
+        }
 
-		return true;
-	}
+        return true;
+    }
 }
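
Both result-checking cases skip any entry whose name contains "crc": Hadoop's checksumming file systems write a .crc side-file next to each data file, and those must not leak into the concatenated query result. A sketch of that guard pulled out as a helper:

    // Matches the indexOf("crc") >= 0 guards in the copy and read loops above.
    private static boolean isChecksumSideFile(String name) {
        return name.indexOf("crc") >= 0;
    }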
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
index 2093b1d..9610497 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
@@ -9,66 +9,66 @@
 
 public class RuntimeFunctionTestSuite extends AbstractTestSuiteClass {
 
-	private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
 
-	public static Test suite() throws Exception {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		RuntimeFunctionTestSuite testSuite = new RuntimeFunctionTestSuite();
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        RuntimeFunctionTestSuite testSuite = new RuntimeFunctionTestSuite();
 
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile()) {
-				String resultFileName = hiveExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new RuntimeFunctionTestCase(qFile, rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile()) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new RuntimeFunctionTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String hiveExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
-	/**
-	 * Runs the tests and collects their result in a TestResult.
-	 */
-	@Override
-	public void run(TestResult result) {
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
 
-		int testCount = countTestCases();
-		for (int i = 0; i < testCount; i++) {
-			Test each = this.testAt(i);
-			if (result.shouldStop())
-				break;
-			runTest(each, result);
-		}
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
 
-		// cleanup hdfs and hyracks cluster
-		try {
-			cleanup();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
-	}
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
 
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
index 1b45b41..b669e28 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
@@ -14,88 +14,86 @@
 import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
 import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
 
-public class RuntimeFunctionTestSuiteCaseGenerator extends
-		AbstractHivesterixTestCase {
-	private File resultFile;
-	private FileSystem dfs;
+public class RuntimeFunctionTestSuiteCaseGenerator extends AbstractHivesterixTestCase {
+    private File resultFile;
+    private FileSystem dfs;
 
-	RuntimeFunctionTestSuiteCaseGenerator(File queryFile, File resultFile) {
-		super("testRuntimeFunction", queryFile);
-		this.queryFile = queryFile;
-		this.resultFile = resultFile;
-	}
+    RuntimeFunctionTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
 
-	@Test
-	public void testRuntimeFunction() throws Exception {
-		StringBuilder queryString = new StringBuilder();
-		readFileToString(queryFile, queryString);
-		String[] queries = queryString.toString().split(";");
-		StringWriter sw = new StringWriter();
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
 
-		HiveConf hconf = ConfUtil.getHiveConf();
-		Driver driver = new Driver(hconf, new PrintWriter(sw));
-		driver.init();
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
 
-		dfs = FileSystem.get(ConfUtil.getJobConf());
+        dfs = FileSystem.get(ConfUtil.getJobConf());
 
-		int i = 0;
-		for (String query : queries) {
-			if (i == queries.length - 1)
-				break;
-			driver.run(query);
-			driver.clear();
-			i++;
-		}
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
 
-		String warehouse = hconf.get("hive.metastore.warehouse.dir");
-		String tableName = removeExt(resultFile.getName());
-		String directory = warehouse + "/" + tableName + "/";
-		String localDirectory = "tmp";
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
 
-		FileStatus[] files = dfs.listStatus(new Path(directory));
-		FileSystem lfs = null;
-		if (files == null) {
-			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
-			files = lfs.listStatus(new Path(directory));
-		}
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
 
-		File resultDirectory = new File(localDirectory + "/" + tableName);
-		deleteDir(resultDirectory);
-		resultDirectory.mkdir();
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
 
-		for (FileStatus fs : files) {
-			Path src = fs.getPath();
-			if (src.getName().indexOf("crc") >= 0)
-				continue;
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
 
-			String destStr = localDirectory + "/" + tableName + "/"
-					+ src.getName();
-			Path dest = new Path(destStr);
-			if (lfs != null) {
-				lfs.copyToLocalFile(src, dest);
-				dfs.copyFromLocalFile(dest, new Path(directory));
-			} else
-				dfs.copyToLocalFile(src, dest);
-		}
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
 
-		File[] rFiles = resultDirectory.listFiles();
-		StringBuilder sb = new StringBuilder();
-		for (File r : rFiles) {
-			if (r.getName().indexOf("crc") >= 0)
-				continue;
-			readFileToString(r, sb);
-		}
-		deleteDir(resultDirectory);
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
 
-		writeStringToFile(resultFile, sb);
-	}
+        writeStringToFile(resultFile, sb);
+    }
 
-	private void deleteDir(File resultDirectory) {
-		if (resultDirectory.exists()) {
-			File[] rFiles = resultDirectory.listFiles();
-			for (File r : rFiles)
-				r.delete();
-			resultDirectory.delete();
-		}
-	}
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
index a67f475..ca2bd6d 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
@@ -9,67 +9,66 @@
 
 public class RuntimeFunctionTestSuiteGenerator extends AbstractTestSuiteClass {
 
-	private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
-	private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
-	private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
+    private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
 
-	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
 
-	public static Test suite() throws Exception {
-		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
-		File testData = new File(PATH_TO_QUERIES);
-		File[] queries = testData.listFiles();
-		RuntimeFunctionTestSuiteGenerator testSuite = new RuntimeFunctionTestSuiteGenerator();
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        RuntimeFunctionTestSuiteGenerator testSuite = new RuntimeFunctionTestSuiteGenerator();
 
-		// set hdfs and hyracks cluster, and load test data to hdfs
-		try {
-			testSuite.setup();
-			testSuite.loadData();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
 
-		for (File qFile : queries) {
-			if (isIgnored(qFile.getName(), ignores))
-				continue;
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
 
-			if (qFile.isFile() && qFile.getName().startsWith("q16_")) {
-				String resultFileName = hiveExtToResExt(qFile.getName());
-				File rFile = new File(PATH_TO_RESULTS + resultFileName);
-				testSuite.addTest(new RuntimeFunctionTestSuiteCaseGenerator(
-						qFile, rFile));
-			}
-		}
-		return testSuite;
-	}
+            if (qFile.isFile() && qFile.getName().startsWith("q16_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new RuntimeFunctionTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
 
-	private static String hiveExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-	}
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
 
-	/**
-	 * Runs the tests and collects their result in a TestResult.
-	 */
-	@Override
-	public void run(TestResult result) {
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
 
-		int testCount = countTestCases();
-		for (int i = 0; i < testCount; i++) {
-			Test each = this.testAt(i);
-			if (result.shouldStop())
-				break;
-			runTest(each, result);
-		}
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
 
-		// cleanup hdfs and hyracks cluster
-		try {
-			cleanup();
-		} catch (Exception e) {
-			e.printStackTrace();
-			throw new IllegalStateException(e.getMessage());
-		}
-	}
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
 
 }
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
index b5db432..cd39c5a 100644
--- a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
@@ -43,190 +43,175 @@
 
 /**
  * TestLazySimpleSerDe.
- * 
  */
 @SuppressWarnings({ "deprecation", "rawtypes" })
 public class SerDeTest extends TestCase {
 
-	/**
-	 * Test the LazySimpleSerDe class.
-	 */
-	public void testLazySimpleSerDe() throws Throwable {
-		try {
-			// Create the SerDe
-			LazySimpleSerDe serDe = new LazySimpleSerDe();
-			Configuration conf = new Configuration();
-			Properties tbl = createProperties();
-			serDe.initialize(conf, tbl);
+    /**
+     * Test the LazySimpleSerDe class.
+     */
+    public void testLazySimpleSerDe() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
 
-			LazySerDe outputSerde = new LazySerDe();
-			outputSerde.initialize(conf, tbl);
+            LazySerDe outputSerde = new LazySerDe();
+            outputSerde.initialize(conf, tbl);
 
-			// Data
-			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\tqf";
+            // Data
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\tqf";
 
-			byte[] bytes = s.getBytes();
-			Writable bytesWritable = new BytesWritable(bytes);
+            byte[] bytes = s.getBytes();
+            Writable bytesWritable = new BytesWritable(bytes);
 
-			// Test
-			// deserializeAndSerialize(serDe, t, s, expectedFieldsData);
-			Object row = serDe.deserialize(bytesWritable); // test my serde
-			StructObjectInspector simpleInspector = (StructObjectInspector) serDe
-					.getObjectInspector();
-			List<Object> fields = simpleInspector
-					.getStructFieldsDataAsList(row);
-			List<? extends StructField> fieldRefs = simpleInspector
-					.getAllStructFieldRefs();
+            // Test
+            // deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+            Object row = serDe.deserialize(bytesWritable); // test my serde
+            StructObjectInspector simpleInspector = (StructObjectInspector) serDe.getObjectInspector();
+            List<Object> fields = simpleInspector.getStructFieldsDataAsList(row);
+            List<? extends StructField> fieldRefs = simpleInspector.getAllStructFieldRefs();
 
-			int i = 0;
-			for (Object field : fields) {
-				BytesWritable fieldWritable = (BytesWritable) outputSerde
-						.serialize(field, fieldRefs.get(i)
-								.getFieldObjectInspector());
-				System.out.print(fieldWritable.getSize() + "|");
-				i++;
-			}
+            int i = 0;
+            for (Object field : fields) {
+                BytesWritable fieldWritable = (BytesWritable) outputSerde.serialize(field, fieldRefs.get(i)
+                        .getFieldObjectInspector());
+                System.out.print(fieldWritable.getSize() + "|");
+                i++;
+            }
 
-			// Writable output = outputSerde.serialize(row, serDe
-			// .getObjectInspector());
-			// System.out.println(output);
-			//
-			// Object row2 = outputSerde.deserialize(output);
-			// Writable output2 = serDe.serialize(row2, outputSerde
-			// .getObjectInspector());
-			// System.out.println(output2);
+            // Writable output = outputSerde.serialize(row, serDe
+            // .getObjectInspector());
+            // System.out.println(output);
+            //
+            // Object row2 = outputSerde.deserialize(output);
+            // Writable output2 = serDe.serialize(row2, outputSerde
+            // .getObjectInspector());
+            // System.out.println(output2);
 
-			// System.out.println(output);
-			// deserializeAndSerialize(outputSerde, t, s, expectedFieldsData);
+            // System.out.println(output);
+            // deserializeAndSerialize(outputSerde, t, s, expectedFieldsData);
 
-		} catch (Throwable e) {
-			e.printStackTrace();
-			throw e;
-		}
-	}
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
 
-	private void deserializeAndSerialize(SerDe serDe, Text t, String s,
-			Object[] expectedFieldsData) throws SerDeException {
-		// Get the row structure
-		StructObjectInspector oi = (StructObjectInspector) serDe
-				.getObjectInspector();
-		List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
-		assertEquals(8, fieldRefs.size());
+    private void deserializeAndSerialize(SerDe serDe, Text t, String s, Object[] expectedFieldsData)
+            throws SerDeException {
+        // Get the row structure
+        StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
+        List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
+        assertEquals(8, fieldRefs.size());
 
-		// Deserialize
-		Object row = serDe.deserialize(t);
-		for (int i = 0; i < fieldRefs.size(); i++) {
-			Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
-			if (fieldData != null) {
-				fieldData = ((LazyPrimitive) fieldData).getWritableObject();
-			}
-			assertEquals("Field " + i, expectedFieldsData[i], fieldData);
-		}
-		// Serialize
-		assertEquals(Text.class, serDe.getSerializedClass());
-		Text serializedText = (Text) serDe.serialize(row, oi);
-		assertEquals("Serialized data", s, serializedText.toString());
-	}
+        // Deserialize
+        Object row = serDe.deserialize(t);
+        for (int i = 0; i < fieldRefs.size(); i++) {
+            Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
+            if (fieldData != null) {
+                fieldData = ((LazyPrimitive) fieldData).getWritableObject();
+            }
+            assertEquals("Field " + i, expectedFieldsData[i], fieldData);
+        }
+        // Serialize
+        assertEquals(Text.class, serDe.getSerializedClass());
+        Text serializedText = (Text) serDe.serialize(row, oi);
+        assertEquals("Serialized data", s, serializedText.toString());
+    }
 
-	private Properties createProperties() {
-		Properties tbl = new Properties();
+    private Properties createProperties() {
+        Properties tbl = new Properties();
 
-		// Set the configuration parameters
-		tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
-		tbl.setProperty("columns",
-				"abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
-		tbl.setProperty("columns.types",
-				"tinyint:smallint:int:bigint:double:string:int:string");
-		tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
-		return tbl;
-	}
+        // Set the configuration parameters
+        tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
+        tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
+        tbl.setProperty("columns.types", "tinyint:smallint:int:bigint:double:string:int:string");
+        tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
+        return tbl;
+    }
 
-	/**
-	 * Test the LazySimpleSerDe class with LastColumnTakesRest option.
-	 */
-	public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable {
-		try {
-			// Create the SerDe
-			LazySimpleSerDe serDe = new LazySimpleSerDe();
-			Configuration conf = new Configuration();
-			Properties tbl = createProperties();
-			tbl.setProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
-					"true");
-			serDe.initialize(conf, tbl);
+    /**
+     * Test the LazySimpleSerDe class with LastColumnTakesRest option.
+     */
+    public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            tbl.setProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST, "true");
+            serDe.initialize(conf, tbl);
 
-			// Data
-			Text t = new Text(
-					"123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
-			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta\tb\t";
-			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
-					new ShortWritable((short) 456), new IntWritable(789),
-					new LongWritable(1000), new DoubleWritable(5.3),
-					new Text("hive and hadoop"), null, new Text("a\tb\t") };
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta\tb\t";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"),
+                    null, new Text("a\tb\t") };
 
-			// Test
-			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
 
-		} catch (Throwable e) {
-			e.printStackTrace();
-			throw e;
-		}
-	}
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
 
-	/**
-	 * Test the LazySimpleSerDe class with extra columns.
-	 */
-	public void testLazySimpleSerDeExtraColumns() throws Throwable {
-		try {
-			// Create the SerDe
-			LazySimpleSerDe serDe = new LazySimpleSerDe();
-			Configuration conf = new Configuration();
-			Properties tbl = createProperties();
-			serDe.initialize(conf, tbl);
+    /**
+     * Test the LazySimpleSerDe class with extra columns.
+     */
+    public void testLazySimpleSerDeExtraColumns() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
 
-			// Data
-			Text t = new Text(
-					"123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
-			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta";
-			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
-					new ShortWritable((short) 456), new IntWritable(789),
-					new LongWritable(1000), new DoubleWritable(5.3),
-					new Text("hive and hadoop"), null, new Text("a") };
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"),
+                    null, new Text("a") };
 
-			// Test
-			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
 
-		} catch (Throwable e) {
-			e.printStackTrace();
-			throw e;
-		}
-	}
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
 
-	/**
-	 * Test the LazySimpleSerDe class with missing columns.
-	 */
-	public void testLazySimpleSerDeMissingColumns() throws Throwable {
-		try {
-			// Create the SerDe
-			LazySimpleSerDe serDe = new LazySimpleSerDe();
-			Configuration conf = new Configuration();
-			Properties tbl = createProperties();
-			serDe.initialize(conf, tbl);
+    /**
+     * Test the LazySimpleSerDe class with missing columns.
+     */
+    public void testLazySimpleSerDeMissingColumns() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
 
-			// Data
-			Text t = new Text("123\t456\t789\t1000\t5.3\t");
-			String s = "123\t456\t789\t1000\t5.3\t\tNULL\tNULL";
-			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
-					new ShortWritable((short) 456), new IntWritable(789),
-					new LongWritable(1000), new DoubleWritable(5.3),
-					new Text(""), null, null };
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\t");
+            String s = "123\t456\t789\t1000\t5.3\t\tNULL\tNULL";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text(""), null, null };
 
-			// Test
-			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
 
-		} catch (Throwable e) {
-			e.printStackTrace();
-			throw e;
-		}
-	}
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
 
 }