Change default Hadoop version to 2.2.0

This change moves the default version of Hadoop to 2.2.0 and introduces dependencyManagement and
property tags to ease management of this and other similar dependencies (see the sketch below).
Additionally, Hivesterix and Pregelix are no longer built by default, as part of moving their
codebases to separate repositories.
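
With dependency versions pinned once in the root pom.xml's dependencyManagement section,
child modules can declare managed dependencies without a <version> tag; this is why the
<version> lines disappear from the module POMs below. A minimal sketch of the pattern
(illustrative only; the actual entries are in the root pom.xml hunk at the end of this diff):

    <!-- root pom.xml: pin each version once -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-client</artifactId>
          <version>${hadoop.version}</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

    <!-- any child pom.xml: the version is inherited -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
    </dependency>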

Change-Id: I7609f192cbb246fdec2d0f301130c389688047a9
Reviewed-on: http://fulliautomatix.ics.uci.edu:8443/124
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Till Westmann <westmann@gmail.com>
diff --git a/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 7be8fea..70e3f21 100644
--- a/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -48,7 +48,6 @@
   	<dependency>
   		<groupId>commons-io</groupId>
   		<artifactId>commons-io</artifactId>
-  		<version>1.4</version>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
diff --git a/hyracks/hyracks-dataflow-hadoop/pom.xml b/hyracks/hyracks-dataflow-hadoop/pom.xml
index 90549d8..a910d2e 100644
--- a/hyracks/hyracks-dataflow-hadoop/pom.xml
+++ b/hyracks/hyracks-dataflow-hadoop/pom.xml
@@ -52,9 +52,15 @@
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>hyracks-hdfs-2.x</artifactId>
+		<version>0.2.14-SNAPSHOT</version>
+		<type>jar</type>
+		<scope>compile</scope>
+	</dependency>
+  	<dependency>
   		<groupId>org.apache.hadoop</groupId>
-  		<artifactId>hadoop-core</artifactId>
-  		<version>0.20.2</version>
+  		<artifactId>hadoop-client</artifactId>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
index cc2ce9b..7a677665 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
@@ -18,11 +18,11 @@
 import java.util.Map;
 import java.util.StringTokenizer;
 
+import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.Counters.Counter;
 
 import edu.uci.ics.dcache.client.DCacheClient;
 import edu.uci.ics.hyracks.api.dataflow.IDataWriter;
@@ -63,8 +63,8 @@
     private final Map<String, String> jobConfMap;
     private IHadoopClassFactory hadoopClassFactory;
 
-    public AbstractHadoopOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, RecordDescriptor recordDescriptor,
-            JobConf jobConf, IHadoopClassFactory hadoopOperatorFactory) {
+    public AbstractHadoopOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity,
+            RecordDescriptor recordDescriptor, JobConf jobConf, IHadoopClassFactory hadoopOperatorFactory) {
         super(spec, inputArity, 1);
         jobConfMap = DatatypeHelper.jobConf2Map(jobConf);
         this.hadoopClassFactory = hadoopOperatorFactory;
@@ -119,6 +119,11 @@
             public void setStatus(String status) {
 
             }
+
+            @Override
+            public float getProgress() {
+                return 0.0f;
+            }
         };
     }
 
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
index 9f0e298..0d44b42 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
@@ -45,9 +45,11 @@
 import edu.uci.ics.hyracks.dataflow.hadoop.util.DatatypeHelper;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.IHadoopClassFactory;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.InputSplitsProxy;
+import edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
 import edu.uci.ics.hyracks.dataflow.std.base.IOpenableDataWriterOperator;
 import edu.uci.ics.hyracks.dataflow.std.util.DeserializedOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 
 public class HadoopMapperOperatorDescriptor<K1, V1, K2, V2> extends AbstractHadoopOperatorDescriptor {
 
@@ -233,7 +235,7 @@
                     };;;
 
                     OutputCommitter outputCommitter = new org.apache.hadoop.mapreduce.lib.output.NullOutputFormat()
-                            .getOutputCommitter(new TaskAttemptContext(conf, new TaskAttemptID()));
+                            .getOutputCommitter(new ContextFactory().createContext(conf, new TaskAttemptID()));
                     StatusReporter statusReporter = new StatusReporter() {
                         @Override
                         public void setStatus(String arg0) {
@@ -252,10 +254,15 @@
                         public Counter getCounter(Enum<?> arg0) {
                             return null;
                         }
+
+                        @Override
+                        public float getProgress() {
+                            // TODO Auto-generated method stub
+                            return 0;
+                        }
                     };;;
-                    context = new org.apache.hadoop.mapreduce.Mapper().new Context(conf, new TaskAttemptID(),
-                            newReader, recordWriter, outputCommitter, statusReporter,
-                            (org.apache.hadoop.mapreduce.InputSplit) inputSplit);
+                    context = new MRContextUtil().createMapContext(conf, new TaskAttemptID(), newReader, recordWriter,
+                            outputCommitter, statusReporter, (org.apache.hadoop.mapreduce.InputSplit) inputSplit);
                     newReader.initialize((org.apache.hadoop.mapreduce.InputSplit) inputSplit, context);
                     ((org.apache.hadoop.mapreduce.Mapper) mapper).run(context);
                 } else {
@@ -343,7 +350,7 @@
         } else {
             String mapperClassName = null;
             if (jobConf.getUseNewMapper()) {
-                JobContext jobContext = new JobContext(conf, null);
+                JobContext jobContext = new ContextFactory().createJobContext(conf);
                 mapperClass = jobContext.getMapperClass();
                 mapperClassName = mapperClass.getName();
             } else {
@@ -358,11 +365,10 @@
     private Object getRecordReader(JobConf conf, Object inputSplit) throws ClassNotFoundException, IOException,
             InterruptedException {
         if (conf.getUseNewMapper()) {
-            JobContext context = new JobContext(conf, null);
+            JobContext context = new ContextFactory().createJobContext(conf);
             org.apache.hadoop.mapreduce.InputFormat inputFormat = (org.apache.hadoop.mapreduce.InputFormat) ReflectionUtils
                     .newInstance(context.getInputFormatClass(), conf);
-            TaskAttemptContext taskAttemptContext = new org.apache.hadoop.mapreduce.TaskAttemptContext(conf,
-                    new TaskAttemptID());
+            TaskAttemptContext taskAttemptContext = new ContextFactory().createContext(conf, new TaskAttemptID());
             return inputFormat.createRecordReader((org.apache.hadoop.mapreduce.InputSplit) inputSplit,
                     taskAttemptContext);
         } else {
@@ -389,7 +395,7 @@
                 if (conf.getUseNewMapper()) {
                     org.apache.hadoop.mapreduce.RecordReader newReader = (org.apache.hadoop.mapreduce.RecordReader) reader;
                     newReader.initialize((org.apache.hadoop.mapreduce.InputSplit) inputSplits[partition],
-                            new TaskAttemptContext(conf, new TaskAttemptID()));
+                            new ContextFactory().createContext(conf, new TaskAttemptID()));
                     newReader.nextKeyValue();
                     Object key = newReader.getCurrentKey();
                     Class keyClass = null;
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
index fa8ac14..9bce999 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
@@ -59,6 +59,7 @@
 import edu.uci.ics.hyracks.dataflow.hadoop.util.InputSplitsProxy;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 
 public class HadoopReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
     private static final long serialVersionUID = 1L;
@@ -96,10 +97,10 @@
             InterruptedException {
         RecordReader hadoopRecordReader = null;
         if (conf.getUseNewMapper()) {
-            JobContext context = new JobContext(conf, null);
+            JobContext context = new ContextFactory().createJobContext(conf);
             org.apache.hadoop.mapreduce.InputFormat inputFormat = (org.apache.hadoop.mapreduce.InputFormat) ReflectionUtils
                     .newInstance(context.getInputFormatClass(), conf);
-            TaskAttemptContext taskAttemptContext = new org.apache.hadoop.mapreduce.TaskAttemptContext(jobConf, null);
+            TaskAttemptContext taskAttemptContext = new ContextFactory().createContext(jobConf, null);
             hadoopRecordReader = (RecordReader) inputFormat.createRecordReader(
                     (org.apache.hadoop.mapreduce.InputSplit) inputSplit, taskAttemptContext);
         } else {
@@ -151,6 +152,12 @@
             public void setStatus(String status) {
 
             }
+
+            @Override
+            public float getProgress() {
+                // TODO Auto-generated method stub
+                return 0;
+            }
         };
     }
 
@@ -173,11 +180,10 @@
                     Object inputSplit = splits[partition];
 
                     if (conf.getUseNewMapper()) {
-                        JobContext context = new JobContext(conf, null);
+                        JobContext context = new ContextFactory().createJobContext(conf);
                         org.apache.hadoop.mapreduce.InputFormat inputFormat = (org.apache.hadoop.mapreduce.InputFormat) ReflectionUtils
                                 .newInstance(context.getInputFormatClass(), conf);
-                        TaskAttemptContext taskAttemptContext = new org.apache.hadoop.mapreduce.TaskAttemptContext(
-                                jobConf, null);
+                        TaskAttemptContext taskAttemptContext = new ContextFactory().createContext(jobConf, null);
                         hadoopRecordReader = (RecordReader) inputFormat.createRecordReader(
                                 (org.apache.hadoop.mapreduce.InputSplit) inputSplit, taskAttemptContext);
                     } else {
@@ -221,7 +227,8 @@
                                 FrameUtils.flushFrame(outBuffer, writer);
                                 appender.reset(outBuffer, true);
                                 if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                                    throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + outBuffer.capacity() + ")");
+                                    throw new HyracksDataException("Record size (" + tb.getSize()
+                                            + ") larger than frame size (" + outBuffer.capacity() + ")");
                                 }
                             }
                         }
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
index feccf30..9c75632 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
@@ -48,10 +48,12 @@
 import edu.uci.ics.hyracks.dataflow.hadoop.data.RawComparingComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.DatatypeHelper;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.IHadoopClassFactory;
+import edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil;
 import edu.uci.ics.hyracks.dataflow.std.base.IOpenableDataWriterOperator;
 import edu.uci.ics.hyracks.dataflow.std.group.DeserializedPreclusteredGroupOperator;
 import edu.uci.ics.hyracks.dataflow.std.group.IGroupAggregator;
 import edu.uci.ics.hyracks.dataflow.std.util.DeserializedOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 
 public class HadoopReducerOperatorDescriptor<K2, V2, K3, V3> extends AbstractHadoopOperatorDescriptor {
     private class ReducerAggregator implements IGroupAggregator {
@@ -87,16 +89,16 @@
             }
         };
 
-        class ReducerContext extends org.apache.hadoop.mapreduce.Reducer.Context {
+        class ReducerContext extends org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer.Context {
             private HadoopReducerOperatorDescriptor.ValueIterator iterator;
 
             @SuppressWarnings("unchecked")
             ReducerContext(org.apache.hadoop.mapreduce.Reducer reducer, JobConf conf) throws IOException,
                     InterruptedException, ClassNotFoundException {
-
-                reducer.super(conf, new TaskAttemptID(), rawKeyValueIterator, null, null, null, null, null, null, Class
-                        .forName("org.apache.hadoop.io.NullWritable"), Class
-                        .forName("org.apache.hadoop.io.NullWritable"));
+                ((org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer) reducer).super(new MRContextUtil()
+                        .createReduceContext(conf, new TaskAttemptID(), rawKeyValueIterator, null, null, null, null,
+                                null, null, Class.forName("org.apache.hadoop.io.NullWritable"),
+                                Class.forName("org.apache.hadoop.io.NullWritable")));
             }
 
             public void setIterator(HadoopReducerOperatorDescriptor.ValueIterator iter) {
@@ -190,6 +192,12 @@
                 public Counter getCounter(Enum<?> arg0) {
                     return null;
                 }
+
+                @Override
+                public float getProgress() {
+                    // TODO Auto-generated method stub
+                    return 0;
+                }
             };
         }
 
@@ -325,7 +333,7 @@
             Object reducer;
             if (!useAsCombiner) {
                 if (getJobConf().getUseNewReducer()) {
-                    JobContext jobContext = new JobContext(getJobConf(), null);
+                    JobContext jobContext = new ContextFactory().createJobContext(getJobConf());
                     reducerClass = (Class<? extends org.apache.hadoop.mapreduce.Reducer<?, ?, ?, ?>>) jobContext
                             .getReducerClass();
                 } else {
@@ -333,7 +341,7 @@
                 }
             } else {
                 if (getJobConf().getUseNewReducer()) {
-                    JobContext jobContext = new JobContext(getJobConf(), null);
+                    JobContext jobContext = new ContextFactory().createJobContext(getJobConf());
                     reducerClass = (Class<? extends org.apache.hadoop.mapreduce.Reducer<?, ?, ?, ?>>) jobContext
                             .getCombinerClass();
                 } else {
@@ -382,7 +390,7 @@
         String outputValueClassName = null;
 
         if (conf.getUseNewMapper()) {
-            JobContext context = new JobContext(conf, null);
+            JobContext context = new ContextFactory().createJobContext(conf);
             outputKeyClassName = context.getOutputKeyClass().getName();
             outputValueClassName = context.getOutputValueClass().getName();
         } else {
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
index 433b36c..f1f06be 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
@@ -21,14 +21,13 @@
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.FileOutputCommitter;
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -39,6 +38,7 @@
 import edu.uci.ics.hyracks.dataflow.std.file.AbstractFileWriteOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IRecordWriter;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 
 public class HadoopWriteOperatorDescriptor extends AbstractFileWriteOperatorDescriptor {
 
@@ -74,11 +74,11 @@
                 tempOutputFile = new Path(tempOutputFile, suffix);
                 finalOutputFile = new Path(outputPath, suffix);
                 if (conf.getUseNewMapper()) {
-                    org.apache.hadoop.mapreduce.JobContext jobContext = new org.apache.hadoop.mapreduce.JobContext(
-                            conf, null);
+                    org.apache.hadoop.mapreduce.JobContext jobContext = new ContextFactory().createJobContext(conf);
                     org.apache.hadoop.mapreduce.OutputFormat newOutputFormat = (org.apache.hadoop.mapreduce.OutputFormat) ReflectionUtils
                             .newInstance(jobContext.getOutputFormatClass(), conf);
-                    recordWriter = newOutputFormat.getRecordWriter(new TaskAttemptContext(conf, taskAttempId));
+                    recordWriter = newOutputFormat.getRecordWriter(new ContextFactory().createContext(conf,
+                            taskAttempId));
                 } else {
                     recordWriter = conf.getOutputFormat().getRecordWriter(FileSystem.get(conf), conf, suffix,
                             new Progressable() {
@@ -106,8 +106,8 @@
             try {
                 if (recordWriter != null) {
                     if (conf.getUseNewMapper()) {
-                        ((org.apache.hadoop.mapreduce.RecordWriter) recordWriter).close(new TaskAttemptContext(conf,
-                                new TaskAttemptID()));
+                        ((org.apache.hadoop.mapreduce.RecordWriter) recordWriter).close(new ContextFactory()
+                                .createContext(conf, new TaskAttemptID()));
                     } else {
                         ((org.apache.hadoop.mapred.RecordWriter) recordWriter).close(null);
                     }
@@ -175,6 +175,12 @@
             public void setStatus(String status) {
 
             }
+
+            @Override
+            public float getProgress() {
+                // TODO Auto-generated method stub
+                return 0;
+            }
         };
     }
 
@@ -199,7 +205,7 @@
         int numOutputters = conf.getNumReduceTasks() != 0 ? conf.getNumReduceTasks() : noOfMappers;
         Object outputFormat = null;
         if (conf.getUseNewMapper()) {
-            outputFormat = ReflectionUtils.newInstance(new org.apache.hadoop.mapreduce.JobContext(conf, null)
+            outputFormat = ReflectionUtils.newInstance(new ContextFactory().createJobContext(conf)
                     .getOutputFormatClass(), conf);
         } else {
             outputFormat = conf.getOutputFormat();
@@ -226,7 +232,8 @@
         }
     }
 
-    public HadoopWriteOperatorDescriptor(IOperatorDescriptorRegistry jobSpec, JobConf jobConf, int numMapTasks) throws Exception {
+    public HadoopWriteOperatorDescriptor(IOperatorDescriptorRegistry jobSpec, JobConf jobConf, int numMapTasks)
+            throws Exception {
         super(jobSpec, getOutputSplits(jobConf, numMapTasks));
         this.jobConfMap = DatatypeHelper.jobConf2Map(jobConf);
         checkIfCanWriteToHDFS(super.splits);
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
index ddc080b..5ac55ff 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
@@ -43,6 +43,7 @@
 import edu.uci.ics.hyracks.dataflow.hadoop.data.HadoopNewPartitionerTuplePartitionComputerFactory;
 import edu.uci.ics.hyracks.dataflow.hadoop.data.WritableComparingBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.DatatypeHelper;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 
 public class HadoopHelper {
     public static final int KEY_FIELD_INDEX = 0;
@@ -97,11 +98,14 @@
         }
     }
 
-    public TaskAttemptContext createTaskAttemptContext(TaskAttemptID taId) {
+    public TaskAttemptContext createTaskAttemptContext(TaskAttemptID taId) throws HyracksDataException {
         ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
         try {
             Thread.currentThread().setContextClassLoader(config.getClassLoader());
-            return new TaskAttemptContext(config, taId);
+            return new ContextFactory().createContext(config, taId);
+        } catch (HyracksDataException e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
         } finally {
             Thread.currentThread().setContextClassLoader(ctxCL);
         }
@@ -111,7 +115,7 @@
         ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
         try {
             Thread.currentThread().setContextClassLoader(config.getClassLoader());
-            return new JobContext(config, null);
+            return new ContextFactory().createJobContext(config);
         } finally {
             Thread.currentThread().setContextClassLoader(ctxCL);
         }
@@ -166,7 +170,7 @@
         try {
             Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
             InputFormat<K, V> fmt = getInputFormat();
-            JobContext jCtx = new JobContext(config, null);
+            JobContext jCtx = new ContextFactory().createJobContext(config);
             try {
                 return fmt.getSplits(jCtx);
             } catch (IOException e) {
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
index 31cd29b..2bef21a 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
@@ -46,6 +46,7 @@
 import edu.uci.ics.hyracks.dataflow.std.sort.Algorithm;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortRunMerger;
+import edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil;
 
 public class MapperOperatorDescriptor<K1 extends Writable, V1 extends Writable, K2 extends Writable, V2 extends Writable>
         extends AbstractSingleActivityOperatorDescriptor {
@@ -252,7 +253,7 @@
                                 Thread.currentThread().setContextClassLoader(ctxCL);
                             }
                             recordWriter.initBlock(blockId);
-                            Mapper<K1, V1, K2, V2>.Context mCtx = mapper.new Context(conf, taId, recordReader,
+                            Mapper<K1, V1, K2, V2>.Context mCtx = new MRContextUtil().createMapContext(conf, taId, recordReader,
                                     recordWriter, null, null, split);
                             mapper.run(mCtx);
                             recordReader.close();
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
index f84ffda3..af1b731 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.counters.GenericCounter;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -34,6 +35,7 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil;
 
 public class ReduceWriter<K2, V2, K3, V3> implements IFrameWriter {
     private final IHyracksTaskContext ctx;
@@ -88,10 +90,8 @@
         bPtr = 0;
         group.add(ctx.allocateFrame());
         fta = new FrameTupleAppender(ctx.getFrameSize());
-        keyCounter = new Counter() {
-        };
-        valueCounter = new Counter() {
-        };
+        keyCounter = new GenericCounter();
+        valueCounter = new GenericCounter();
     }
 
     @Override
@@ -146,7 +146,7 @@
     private void reduce() throws HyracksDataException {
         kvi.reset(group, bPtr + 1);
         try {
-            Reducer<K2, V2, K3, V3>.Context rCtx = reducer.new Context(helper.getConfiguration(), taId, kvi,
+            Reducer<K2, V2, K3, V3>.Context rCtx = new MRContextUtil().createReduceContext(helper.getConfiguration(), taId, kvi,
                     keyCounter, valueCounter, recordWriter, null, null,
                     (RawComparator<K2>) helper.getRawGroupingComparator(), (Class<K2>) helper.getJob()
                             .getMapOutputKeyClass(), (Class<V2>) helper.getJob().getMapOutputValueClass());
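
Editor's note: in Hadoop 2.x, org.apache.hadoop.mapreduce.Counter is an interface with many
abstract methods, so the empty anonymous subclasses removed above no longer compile;
GenericCounter is the stock concrete implementation substituted in their place.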
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/MRContextUtil.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/MRContextUtil.java
new file mode 100644
index 0000000..060f76f
--- /dev/null
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/MRContextUtil.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.hadoop.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.mapred.RawKeyValueIterator;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.StatusReporter;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
+import org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A wrapper that creates Hadoop 2.x map and reduce task contexts
+ */
+public class MRContextUtil {
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public Mapper.Context createMapContext(Configuration conf, TaskAttemptID taskid, RecordReader reader,
+            RecordWriter writer, OutputCommitter committer, StatusReporter reporter, InputSplit split) {
+        return new WrappedMapper().getMapContext(new MapContextImpl(conf, taskid, reader, writer, committer, reporter,
+                split));
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public Reducer.Context createReduceContext(Configuration conf, TaskAttemptID taskid, RawKeyValueIterator input,
+            Counter inputKeyCounter, Counter inputValueCounter, RecordWriter output, OutputCommitter committer,
+            StatusReporter reporter, RawComparator comparator, Class keyClass, Class valueClass)
+            throws HyracksDataException {
+        try {
+            return new WrappedReducer().getReducerContext(new ReduceContextImpl(conf, taskid, input, inputKeyCounter,
+                    inputValueCounter, output, committer, reporter, comparator, keyClass, valueClass));
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
\ No newline at end of file
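
Editor's note: Hadoop 2.x made Mapper.Context and Reducer.Context abstract, so the 1.x idiom
"mapper.new Context(...)" removed above no longer compiles; MRContextUtil instead builds a
MapContextImpl/ReduceContextImpl and wraps it via WrappedMapper/WrappedReducer. A minimal usage
sketch, mirroring the createMapContext call in MapperOperatorDescriptor (the example class name
is illustrative; null is passed where no reader, writer, committer, reporter, or split is needed):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    import edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil;

    public class MRContextUtilExample {
        @SuppressWarnings({ "rawtypes", "unchecked" })
        public static Mapper.Context bareMapContext(JobConf conf, TaskAttemptID taId) {
            // Builds a WrappedMapper-backed context; reader/writer/committer/reporter/split
            // are left null here because only a bare context is required.
            return new MRContextUtil().createMapContext(conf, taId, null, null, null, null, null);
        }
    }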
diff --git a/hyracks/hyracks-dataflow-std/pom.xml b/hyracks/hyracks-dataflow-std/pom.xml
index 058ddc7..ce66296 100644
--- a/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks/hyracks-dataflow-std/pom.xml
@@ -55,7 +55,6 @@
   	<dependency>
   		<groupId>commons-io</groupId>
   		<artifactId>commons-io</artifactId>
-  		<version>2.4</version>
   	</dependency>
   </dependencies>
 </project>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
index 8bc7704..bbe34e7 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
@@ -34,8 +34,7 @@
   <dependencies>
       <dependency>
          <groupId>org.apache.hadoop</groupId>
-         <artifactId>hadoop-core</artifactId>
-         <version>0.20.2</version>
+         <artifactId>hadoop-client</artifactId>
          <type>jar</type>
          <scope>compile</scope>
       </dependency>
diff --git a/hyracks/hyracks-hadoop-compat/pom.xml b/hyracks/hyracks-hadoop-compat/pom.xml
index 3356611..ea5729a 100644
--- a/hyracks/hyracks-hadoop-compat/pom.xml
+++ b/hyracks/hyracks-hadoop-compat/pom.xml
@@ -80,8 +80,7 @@
   <dependencies>
   	<dependency>
   		<groupId>org.apache.hadoop</groupId>
-  		<artifactId>hadoop-core</artifactId>
-  		<version>0.20.2</version>
+  		<artifactId>hadoop-client</artifactId>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
diff --git a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
index 4766cb9..ba6b986 100644
--- a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
+++ b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
@@ -16,8 +16,10 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.JobID;
+import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
@@ -169,4 +171,34 @@
 
     }
 
+    @Override
+    public Configuration getConfiguration() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public String getFailureInfo() throws IOException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public String getHistoryUrl() throws IOException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public JobStatus getJobStatus() throws IOException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public boolean isRetired() throws IOException {
+        // TODO Auto-generated method stub
+        return false;
+    }
+
 }
diff --git a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
index 3cab22d..d72b44d 100644
--- a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
+++ b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
@@ -33,7 +33,8 @@
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Partitioner;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapred.JobContextImpl;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
@@ -110,8 +111,8 @@
 	public JobConf getConf() {
 		return jobConf;
 	}
-
-	public static VersionedProtocol getProtocol(Class protocolClass,
+	//TODO: Why is there now a type mismatch? Why does a bounded wildcard fix it?
+	public static VersionedProtocol getProtocol(Class<? extends VersionedProtocol> protocolClass,
 			InetSocketAddress inetAddress, JobConf jobConf) throws IOException {
 		VersionedProtocol versionedProtocol = RPC.getProxy(protocolClass,
 				ClientProtocol.versionID, inetAddress, jobConf);
@@ -144,7 +145,7 @@
 			JobConf conf) throws ClassNotFoundException, IOException,
 			InterruptedException {
 		org.apache.hadoop.mapreduce.InputSplit[] splits = null;
-		JobContext context = new JobContext(conf, null);
+		JobContext context = new JobContextImpl(conf, null);
 		org.apache.hadoop.mapreduce.InputFormat inputFormat = ReflectionUtils
 				.newInstance(context.getInputFormatClass(), conf);
 		List<org.apache.hadoop.mapreduce.InputSplit> inputSplits = inputFormat
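
Editor's note on the TODO above: in Hadoop 2.x, RPC.getProxy is a generic method, roughly

    public static <T> T getProxy(Class<T> protocol, long clientVersion,
            InetSocketAddress addr, Configuration conf) throws IOException;

so invoking it with a raw Class makes the call unchecked and its result effectively Object,
which no longer assigns to VersionedProtocol. The bounded wildcard lets T be inferred as a
subtype of VersionedProtocol, so the assignment typechecks again. (The signature above is
paraphrased from memory, not copied from the Hadoop sources.)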
diff --git a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
index d80f55d..8aad37e 100644
--- a/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
+++ b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
@@ -121,6 +121,12 @@
             public Counter getCounter(Enum<?> arg0) {
                 return null;
             }
+
+            @Override
+            public float getProgress() {
+                // TODO Auto-generated method stub
+                return 0f;
+            }
         };
         return reporter;
     }
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
similarity index 97%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
rename to hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
index 988d13c..707cc41 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
@@ -15,8 +15,8 @@
  !-->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
-	<artifactId>hyracks-hdfs-0.20.2</artifactId>
-	<name>hyracks-hdfs-0.20.2</name>
+	<artifactId>hyracks-hdfs-1.x</artifactId>
+	<name>hyracks-hdfs-1.x</name>
 	<parent>
 		<artifactId>hyracks-hdfs</artifactId>
 		<groupId>edu.uci.ics.hyracks</groupId>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
similarity index 100%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
rename to hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
similarity index 100%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
rename to hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
similarity index 85%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
rename to hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
index dbbfe0f..cde0fa3 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
@@ -15,8 +15,8 @@
  !-->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
-	<artifactId>hyracks-hdfs-0.23.1</artifactId>
-	<name>hyracks-hdfs-0.23.1</name>
+	<artifactId>hyracks-hdfs-2.x</artifactId>
+	<name>hyracks-hdfs-2.x</name>
 	<parent>
 		<artifactId>hyracks-hdfs</artifactId>
 		<groupId>edu.uci.ics.hyracks</groupId>
@@ -43,6 +43,42 @@
 				<activeByDefault>true</activeByDefault>
 				<property>
 					<name>hadoop</name>
+					<value>2.2.0</value>
+				</property>
+			</activation>
+			<id>hadoop-2.2.0</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+				<property>
+					<name>hadoop</name>
 					<value>0.23.1</value>
 				</property>
 			</activation>
@@ -51,28 +87,24 @@
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
 					<artifactId>hadoop-common</artifactId>
-					<version>0.23.1</version>
 					<type>jar</type>
 					<scope>compile</scope>
 				</dependency>
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
 					<artifactId>hadoop-mapreduce-client-core</artifactId>
-					<version>0.23.1</version>
 					<type>jar</type>
 					<scope>compile</scope>
 				</dependency>
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
 					<artifactId>hadoop-hdfs</artifactId>
-					<version>0.23.1</version>
 					<type>jar</type>
 					<scope>compile</scope>
 				</dependency>
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
 					<artifactId>hadoop-minicluster</artifactId>
-					<version>0.23.1</version>
 					<type>jar</type>
 					<scope>compile</scope>
 				</dependency>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
similarity index 100%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
rename to hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
similarity index 100%
rename from hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
rename to hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
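
The ContextFactory source itself is a 100% rename and its body is not shown here. For
orientation, a hypothetical 2.x implementation is sketched below, under the assumption that it
delegates to the task-context classes added in Hadoop 0.23+; this is not the actual file, and
the real createContext also declares HyracksDataException, per the HadoopHelper hunk above:

    package edu.uci.ics.hyracks.hdfs;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.task.JobContextImpl;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class ContextFactory {
        // Create a task-attempt context; a null id gets a fresh TaskAttemptID,
        // matching the call sites above that pass null.
        public TaskAttemptContext createContext(Configuration conf, TaskAttemptID tid) {
            return new TaskAttemptContextImpl(conf, tid == null ? new TaskAttemptID() : tid);
        }

        // Create a job context with a placeholder JobID.
        public JobContext createJobContext(Configuration conf) {
            return new JobContextImpl(conf, new JobID("hyracks", 0));
        }
    }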
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index 2e5b9c0..0eb32fc 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -67,13 +67,13 @@
 	<profiles>
 		<profile>
 			<activation>
-				<activeByDefault>true</activeByDefault>
+				<activeByDefault>false</activeByDefault>
 			</activation>
 			<id>hadoop-0.20.2</id>
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<artifactId>hyracks-hdfs-1.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
@@ -92,7 +92,27 @@
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<artifactId>hyracks-hdfs-1.x</artifactId>
+					<version>${project.version}</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>2.2.0</value>
+				</property>
+			</activation>
+			<id>hadoop-2.2.0</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-2.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
@@ -111,7 +131,7 @@
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<artifactId>hyracks-hdfs-2.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
@@ -130,7 +150,7 @@
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<artifactId>hyracks-hdfs-2.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
@@ -149,7 +169,7 @@
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<artifactId>hyracks-hdfs-2.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
@@ -168,7 +188,7 @@
 			<dependencies>
 				<dependency>
 					<groupId>edu.uci.ics.hyracks</groupId>
-					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<artifactId>hyracks-hdfs-2.x</artifactId>
 					<version>${project.version}</version>
 					<type>jar</type>
 					<scope>compile</scope>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
index bff9a3e..4bb966d 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
@@ -26,7 +26,12 @@
 
 <property>
 	<name>dfs.block.size</name>
-	<value>65536</value>
+	<value>1048576</value>
+</property>
+
+<property>
+	<name>dfs.namenode.fs-limits.min-block-size</name>
+	<value>1048576</value>
 </property>
 
 </configuration>
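
Editor's note: Hadoop 2.x adds dfs.namenode.fs-limits.min-block-size (default 1048576), and the
NameNode rejects files created with a smaller block size, so the previous 65536-byte test value
would fail under 2.2.0; both properties are raised to 1 MB together.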
diff --git a/hyracks/hyracks-hdfs/pom.xml b/hyracks/hyracks-hdfs/pom.xml
index c9b12f6..15919d5 100644
--- a/hyracks/hyracks-hdfs/pom.xml
+++ b/hyracks/hyracks-hdfs/pom.xml
@@ -26,8 +26,8 @@
   </parent>
 
   <modules>
-    <module>hyracks-hdfs-0.20.2</module>
-    <module>hyracks-hdfs-0.23.1</module>
+    <module>hyracks-hdfs-1.x</module>
+    <module>hyracks-hdfs-2.x</module>
     <module>hyracks-hdfs-core</module>
   </modules>
 </project>
diff --git a/pom.xml b/pom.xml
index 9267072..d94e962 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,21 +22,60 @@
   <name>hyracks-ecosystem-full-stack</name>
 
   <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <jvm.extraargs />
-
     <!-- Definition of tests in various categories which may be excluded -->
     <hanging.pregelix.tests>**/pregelix/**/FailureRecovery*.java</hanging.pregelix.tests>
     <hivesterix.perf.tests>**/hivesterix/perf/PerfTestSuite.java</hivesterix.perf.tests>
     <global.test.includes>**/*TestSuite.java,**/*Test.java</global.test.includes>
     <global.test.excludes>**/Abstract*.java,${hanging.pregelix.tests},${hivesterix.perf.tests}</global.test.excludes>
+    <!-- Versions managed under dependencyManagement or used in many projects via properties -->
+    <hadoop.version>2.2.0</hadoop.version>
+    <junit.version>4.8.1</junit.version>
+    <commons.io.version>2.4</commons.io.version>
   </properties>
   <dependencyManagement>
     <dependencies>
       <dependency>
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
-        <version>4.8.1</version>
+        <version>${junit.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-client</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-common</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minicluster</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-core</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>commons-io</groupId>
+        <artifactId>commons-io</artifactId>
+        <version>${commons.io.version}</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -133,7 +172,5 @@
   <modules>
     <module>hyracks</module>
     <module>algebricks</module>
-    <module>pregelix</module>
-    <module>hivesterix</module>
   </modules>
 </project>