Integration of asterix-installer: checkpoint 1

Package asterix-events as a binary assembly (events/ and scripts/ resources plus the
built jar), replace EventDriver's home-dir plumbing with an events directory that is
passed explicitly to EventrixClient, load events.xml inside the client, ship the events
directory (rather than an events.pkg tarball) to the cluster nodes, and add the event
scripts and events.xml under src/main/resources.

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_stabilization_installer@1219 eaa15691-b419-025a-1212-ee371bd00084
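
Reviewer note: a minimal sketch (not part of the patch) of how the reworked client API in
this checkpoint is driven. Class and method names are taken from the diff below; the file
paths are placeholders, and the EVENT_HOME-derived events directory mirrors EventDriver.main.

// Sketch only: assumes the patched asterix-events classes are on the classpath.
import java.io.File;

import edu.uci.ics.asterix.event.driver.EventDriver;
import edu.uci.ics.asterix.event.management.EventrixClient;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;

public class EventrixClientSketch {
    public static void main(String[] args) throws Exception {
        // Unmarshal the cluster and pattern descriptions, as EventDriver.main does.
        Cluster cluster = EventDriver.initializeCluster("/path/to/cluster.xml");
        Patterns patterns = EventDriver.initializePatterns("/path/to/pattern.xml");

        // The events directory now travels with the client instead of being read from
        // EventDriver's old home dir; the client loads events.xml from its events/ subtree
        // (packaged by the new binary assembly).
        String eventsDir = System.getenv("EVENT_HOME") + File.separator + "events";

        EventrixClient client = EventDriver.getClient(eventsDir, cluster, false /* dryRun */);
        client.submit(patterns); // returns once all patterns complete, or throws on failure
    }
}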
diff --git a/asterix-events/pom.xml b/asterix-events/pom.xml
index a9aeaa5..46bb96a 100644
--- a/asterix-events/pom.xml
+++ b/asterix-events/pom.xml
@@ -125,6 +125,21 @@
             </execution>
           </executions>
        </plugin>
+       <plugin>
+         <artifactId>maven-assembly-plugin</artifactId>
+         <version>2.2-beta-2</version>
+         <executions>
+           <execution>
+             <configuration>
+               <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+             </configuration>
+             <phase>package</phase>
+             <goals>
+               <goal>attached</goal>
+             </goals>
+           </execution>
+         </executions>
+       </plugin>
     </plugins>
   </build>
 
diff --git a/asterix-events/src/main/assembly/binary-assembly.xml b/asterix-events/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..29ebbdd
--- /dev/null
+++ b/asterix-events/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,27 @@
+<assembly>
+  <id>bin</id>
+  <formats>
+    <format>tar.gz</format>
+    <format>tar.bz2</format>
+    <format>zip</format>
+  </formats>
+  <fileSets>
+    <fileSet>
+      <directory>src/main/resources/events</directory>
+      <outputDirectory>events</outputDirectory>
+      <includes></includes>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/scripts</directory>
+      <outputDirectory>scripts</outputDirectory>
+      <includes></includes>
+    </fileSet>
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>target</outputDirectory>
+      <includes>
+        <include>*.jar</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+</assembly>
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
index 379d811..ddefdd1 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
@@ -20,7 +20,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Logger;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
@@ -41,143 +40,134 @@
 
 public class EventDriver {
 
-    public static final String CLIENT_NODE_ID = "client_node";
-    public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID, "127.0.0.1", null, null, null);
+	public static final String CLIENT_NODE_ID = "client_node";
+	public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID,
+			"127.0.0.1", null, null, null);
 
-    private static final Logger LOGGER = Logger.getLogger(EventDriver.class.getName());
+	private static String eventsDir;
+	private static Events events;
+	private static Map<String, String> env = new HashMap<String, String>();
+	private static String scriptDirSuffix;
 
-    private static String homeDir;
-    private static Events events;
-    private static Map<String, String> env = new HashMap<String, String>();
-    private static String scriptDirSuffix;
+	public static String getEventsDir() {
+		return eventsDir;
+	}
 
-    public static String getHomeDir() {
-        return homeDir;
-    }
+	public static Events getEvents() {
+		return events;
+	}
 
-    public static void setHomeDir(String homeDir) {
-        EventDriver.homeDir = homeDir;
-    }
+	public static Map<String, String> getEnvironment() {
+		return env;
+	}
 
-    public static Events getEvents() {
-        return events;
-    }
+	public static String getStringifiedEnv(Cluster cluster) {
+		StringBuffer buffer = new StringBuffer();
+		for (Property p : cluster.getEnv().getProperty()) {
+			buffer.append(p.getKey() + "=" + p.getValue() + " ");
+		}
+		return buffer.toString();
+	}
 
-    public static void setEvents(Events events) {
-        EventDriver.events = events;
-    }
+	public static Cluster initializeCluster(String path) throws JAXBException,
+			IOException {
+		File file = new File(path);
+		JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+		Unmarshaller unmarshaller = ctx.createUnmarshaller();
+		Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
+		for (Property p : cluster.getEnv().getProperty()) {
+			env.put(p.getKey(), p.getValue());
+		}
+		return cluster;
+	}
 
-    public static Map<String, String> getEnvironment() {
-        return env;
-    }
+	public static Patterns initializePatterns(String path)
+			throws JAXBException, IOException {
+		File file = new File(path);
+		JAXBContext ctx = JAXBContext.newInstance(Patterns.class);
+		Unmarshaller unmarshaller = ctx.createUnmarshaller();
+		return (Patterns) unmarshaller.unmarshal(file);
+	}
 
-    public static String getStringifiedEnv(Cluster cluster) {
-        StringBuffer buffer = new StringBuffer();
-        for (Property p : cluster.getEnv().getProperty()) {
-            buffer.append(p.getKey() + "=" + p.getValue() + " ");
-        }
-        return buffer.toString();
-    }
+	private static void initialize(EventConfig eventConfig) throws IOException,
+			JAXBException {
 
-    public static void initializeEvents(String path) throws IOException {
-        try {
-            File eventsFile = new File(path);
-            JAXBContext ctx = JAXBContext.newInstance(Events.class);
-            Unmarshaller unmarshaller = ctx.createUnmarshaller();
-            events = (Events) unmarshaller.unmarshal(eventsFile);
-        } catch (JAXBException e) {
-            e.printStackTrace();
-        }
-    }
+	}
 
-    public static Cluster initializeCluster(String path) throws JAXBException, IOException {
-        File file = new File(path);
-        JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
-        Unmarshaller unmarshaller = ctx.createUnmarshaller();
-        Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
-        for (Property p : cluster.getEnv().getProperty()) {
-            env.put(p.getKey(), p.getValue());
-        }
-        return cluster;
-    }
+	public static EventrixClient getClient(String eventsDir, Cluster cluster,
+			boolean dryRun) throws Exception {
+		return new EventrixClient(eventsDir, cluster, dryRun,
+				new DefaultOutputHandler());
+	}
 
-    public static Patterns initializePatterns(String path) throws JAXBException, IOException {
-        File file = new File(path);
-        JAXBContext ctx = JAXBContext.newInstance(Patterns.class);
-        Unmarshaller unmarshaller = ctx.createUnmarshaller();
-        return (Patterns) unmarshaller.unmarshal(file);
-    }
+	public static EventrixClient getClient(String eventsDir, Cluster cluster,
+			boolean dryRun, IOutputHandler outputHandler) throws Exception {
+		return new EventrixClient(eventsDir, cluster, dryRun, outputHandler);
+	}
 
-    private static void initialize(EventConfig eventConfig) throws IOException, JAXBException {
-        homeDir = System.getenv("EVENT_HOME");
-        if (homeDir == null) {
-            throw new IllegalStateException("EVENT_HOME is not set");
-        }
-        initializeEvents(homeDir + "/" + EventUtil.EVENTS_DIR + "/" + "events.xml");
-    }
+	public static void main(String[] args) throws Exception {
+		String eventsHome = System.getenv("EVENT_HOME");
+		if (eventsHome == null) {
+			throw new IllegalStateException("EVENT_HOME is not set");
+		}
+		eventsDir = eventsHome + File.separator + EventUtil.EVENTS_DIR;
+		EventConfig eventConfig = new EventConfig();
+		CmdLineParser parser = new CmdLineParser(eventConfig);
+		try {
+			parser.parseArgument(args);
+			if (eventConfig.help) {
+				parser.printUsage(System.out);
+			}
+			if (eventConfig.seed > 0) {
+				Randomizer.getInstance(eventConfig.seed);
+			}
+			Cluster cluster = initializeCluster(eventConfig.clusterPath);
+			Patterns patterns = initializePatterns(eventConfig.patternPath);
+			initialize(eventConfig);
 
-    public static EventrixClient getClient(Cluster cluster, boolean dryRun) throws Exception {
-        return new EventrixClient(cluster, dryRun, new DefaultOutputHandler());
-    }
+			if (!eventConfig.dryRun) {
+				prepare(cluster);
+			}
+			EventrixClient client = new EventrixClient(eventsDir, cluster,
+					eventConfig.dryRun, new DefaultOutputHandler());
+			client.submit(patterns);
+			if (!eventConfig.dryRun) {
+				cleanup(cluster);
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+			parser.printUsage(System.err);
+		}
+	}
 
-    public static EventrixClient getClient(Cluster cluster, boolean dryRun, IOutputHandler outputHandler)
-            throws Exception {
-        return new EventrixClient(cluster, dryRun, outputHandler);
-    }
+	private static void prepare(Cluster cluster) throws IOException,
+			InterruptedException {
 
-    public static void main(String[] args) throws Exception {
-        EventConfig eventConfig = new EventConfig();
-        CmdLineParser parser = new CmdLineParser(eventConfig);
-        try {
-            parser.parseArgument(args);
-            if (eventConfig.help) {
-                parser.printUsage(System.out);
-            }
-            if (eventConfig.seed > 0) {
-                Randomizer.getInstance(eventConfig.seed);
-            }
-            Cluster cluster = initializeCluster(eventConfig.clusterPath);
-            Patterns patterns = initializePatterns(eventConfig.patternPath);
+		scriptDirSuffix = "" + System.nanoTime();
+		List<String> args = new ArrayList<String>();
+		args.add(scriptDirSuffix);
+		Node clientNode = new Node();
+		clientNode.setId("client");
+		clientNode.setIp("127.0.0.1");
+		for (Node node : cluster.getNode()) {
+			args.add(node.getIp());
+		}
+		EventUtil.executeLocalScript(clientNode, eventsDir + "/" + "events"
+				+ "/" + "prepare.sh", args);
+	}
 
-            initialize(eventConfig);
-            if (!eventConfig.dryRun) {
-                prepare(cluster);
-            }
-            EventrixClient client = new EventrixClient(cluster, eventConfig.dryRun, new DefaultOutputHandler());
-            client.submit(patterns);
-            if (!eventConfig.dryRun) {
-                cleanup(cluster);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            parser.printUsage(System.err);
-        }
-    }
-
-    private static void prepare(Cluster cluster) throws IOException, InterruptedException {
-
-        scriptDirSuffix = "" + System.nanoTime();
-        List<String> args = new ArrayList<String>();
-        args.add(scriptDirSuffix);
-        Node clientNode = new Node();
-        clientNode.setId("client");
-        clientNode.setIp("127.0.0.1");
-        for (Node node : cluster.getNode()) {
-            args.add(node.getIp());
-        }
-        EventUtil.executeLocalScript(clientNode, homeDir + "/" + "events" + "/" + "prepare.sh", args);
-    }
-
-    private static void cleanup(Cluster cluster) throws IOException, InterruptedException {
-        List<String> args = new ArrayList<String>();
-        args.add(scriptDirSuffix);
-        Node clientNode = new Node();
-        clientNode.setId("client");
-        clientNode.setIp("127.0.0.1");
-        for (Node node : cluster.getNode()) {
-            args.add(node.getIp());
-        }
-        EventUtil.executeLocalScript(clientNode, homeDir + "/" + "events" + "/" + "cleanup.sh", args);
-    }
+	private static void cleanup(Cluster cluster) throws IOException,
+			InterruptedException {
+		List<String> args = new ArrayList<String>();
+		args.add(scriptDirSuffix);
+		Node clientNode = new Node();
+		clientNode.setId("client");
+		clientNode.setIp("127.0.0.1");
+		for (Node node : cluster.getNode()) {
+			args.add(node.getIp());
+		}
+		EventUtil.executeLocalScript(clientNode, eventsDir + "/" + "events"
+				+ "/" + "cleanup.sh", args);
+	}
 
 }
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
index 869edd7..92bdf72 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
@@ -39,16 +39,16 @@
     private static final String DAEMON = "DAEMON";
 
     public void executeEvent(Node node, String script, List<String> args, boolean isDaemon, Cluster cluster,
-            Pattern pattern, IOutputHandler outputHandler) throws IOException {
+            Pattern pattern, IOutputHandler outputHandler, EventrixClient client) throws IOException {
         List<String> pargs = new ArrayList<String>();
         pargs.add("/bin/bash");
-        pargs.add(EventDriver.getHomeDir() + File.separator + "events.pkg" + File.separator + EXECUTE_SCRIPT);
-        StringBuffer argBuffer = new StringBuffer();
+        pargs.add(client.getEventsDir() + File.separator + "scripts" + File.separator + EXECUTE_SCRIPT);
         StringBuffer envBuffer = new StringBuffer(IP_LOCATION + "=" + node.getIp());
         if (!node.getId().equals(EventDriver.CLIENT_NODE_ID)) {
             envBuffer.append(" " + EventDriver.getStringifiedEnv(cluster));
             pargs.add(cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername());
         }
+        StringBuffer argBuffer = new StringBuffer();
         if (args != null && args.size() > 0) {
             for (String arg : args) {
                 argBuffer.append(arg + " ");
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
index aac2604..9f52642 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
@@ -25,131 +25,142 @@
 
 import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.event.Event;
 import edu.uci.ics.asterix.event.schema.pattern.Pattern;
 import edu.uci.ics.asterix.event.schema.pattern.Period;
 
 public class EventTask extends TimerTask {
 
-    public static enum State {
-        INITIALIZED,
-        IN_PROGRESS,
-        COMPLETED,
-        FAILED
-    }
+	public static enum State {
+		INITIALIZED, IN_PROGRESS, COMPLETED, FAILED
+	}
 
-    private static final Logger logger = Logger.getLogger(EventTask.class.getName());
+	private static final Logger logger = Logger.getLogger(EventTask.class
+			.getName());
 
-    private Pattern pattern;
-    private long interval = 0;
-    private long initialDelay = 0;
-    private int maxOccurs = Integer.MAX_VALUE;
-    private int occurrenceCount = 0;
-    private Timer timer;
-    private String taskScript;
-    private Node location;
-    private List<String> taskArgs;
-    private EventrixClient scheduler;
-    private List<Node> candidateLocations;
-    private boolean dynamicLocation = false;
-    private boolean reuseLocation = false;
-    private State state;
+	private Pattern pattern;
+	private Event event;
+	private long interval = 0;
+	private long initialDelay = 0;
+	private int maxOccurs = Integer.MAX_VALUE;
+	private int occurrenceCount = 0;
+	private Timer timer;
+	private String taskScript;
+	private Node location;
+	private List<String> taskArgs;
+	private EventrixClient client;
+	private List<Node> candidateLocations;
+	private boolean dynamicLocation = false;
+	private boolean reuseLocation = false;
+	private State state;
 
-    static {
-        logger.setLevel(Level.WARNING);
-    }
+	static {
+		logger.setLevel(Level.WARNING);
+	}
 
-    public EventTask(Pattern pattern, EventrixClient client) {
-        this.pattern = pattern;
-        this.scheduler = client;
-        Period period = pattern.getPeriod();
-        if (period != null && period.getAbsvalue() != null) {
-            this.interval = EventUtil.parseTimeInterval(period.getAbsvalue(), period.getUnit());
-        }
-        if (pattern.getDelay() != null) {
-            this.initialDelay = EventUtil.parseTimeInterval(new ValueType(pattern.getDelay().getValue()), pattern
-                    .getDelay().getUnit());
-        }
-        if (pattern.getMaxOccurs() != null) {
-            this.maxOccurs = pattern.getMaxOccurs();
-        }
-        this.timer = new Timer();
-        taskArgs = EventUtil.getEventArgs(pattern);
-        candidateLocations = EventUtil.getCandidateLocations(pattern, client.getCluster());
-        if (pattern.getEvent().getNodeid().getValue().getRandom() != null && period != null && maxOccurs > 1) {
-            dynamicLocation = true;
-            reuseLocation = pattern.getEvent().getNodeid().getValue().getRandom().getRange().isReuse();
-        } else {
-            location = EventUtil.getEventLocation(pattern, candidateLocations, scheduler.getCluster());
-        }
-        String scriptsDir;
-        if (location.getId().equals(EventDriver.CLIENT_NODE_ID)) {
-            scriptsDir = EventDriver.getHomeDir() + File.separator + "events.pkg";
-        } else {
-            scriptsDir = client.getCluster().getWorkingDir().getDir() + File.separator + "events.pkg";
-        }
-        taskScript = scriptsDir + File.separator + EventUtil.getEvent(pattern).getScript();
-        state = State.INITIALIZED;
-    }
+	public EventTask(Pattern pattern, EventrixClient client) {
+		this.pattern = pattern;
+		this.client = client;
+		Period period = pattern.getPeriod();
+		if (period != null && period.getAbsvalue() != null) {
+			this.interval = EventUtil.parseTimeInterval(period.getAbsvalue(),
+					period.getUnit());
+		}
+		if (pattern.getDelay() != null) {
+			this.initialDelay = EventUtil.parseTimeInterval(new ValueType(
+					pattern.getDelay().getValue()), pattern.getDelay()
+					.getUnit());
+		}
+		if (pattern.getMaxOccurs() != null) {
+			this.maxOccurs = pattern.getMaxOccurs();
+		}
+		this.timer = new Timer();
+		taskArgs = EventUtil.getEventArgs(pattern);
+		candidateLocations = EventUtil.getCandidateLocations(pattern,
+				client.getCluster());
+		if (pattern.getEvent().getNodeid().getValue().getRandom() != null
+				&& period != null && maxOccurs > 1) {
+			dynamicLocation = true;
+			reuseLocation = pattern.getEvent().getNodeid().getValue()
+					.getRandom().getRange().isReuse();
+		} else {
+			location = EventUtil.getEventLocation(pattern, candidateLocations,
+					client.getCluster());
+		}
+		String scriptsDir;
+		if (location.getId().equals(EventDriver.CLIENT_NODE_ID)) {
+			scriptsDir = client.getEventsDir() + File.separator + "events";
+		} else {
+			scriptsDir = client.getCluster().getWorkingDir().getDir() + File.separator + "eventrix" + File.separator + "events";
+		}
+		event = EventUtil.getEvent(pattern, client.getEvents());
+		taskScript = scriptsDir + File.separator + event.getScript();
+		state = State.INITIALIZED;
+	}
 
-    public void start() {
-        if (interval > 0) {
-            timer.schedule(this, initialDelay, interval);
-        } else {
-            timer.schedule(this, initialDelay);
-        }
-    }
+	public void start() {
+		if (interval > 0) {
+			timer.schedule(this, initialDelay, interval);
+		} else {
+			timer.schedule(this, initialDelay);
+		}
+	}
 
-    @Override
-    public void run() {
-        if (candidateLocations.size() == 0) {
-            timer.cancel();
-            scheduler.notifyCompletion(new EventTaskReport(this));
-        } else {
-            if (dynamicLocation) {
-                location = EventUtil.getEventLocation(pattern, candidateLocations, scheduler.getCluster());
-                if (!reuseLocation) {
-                    candidateLocations.remove(location);
-                }
-            }
+	@Override
+	public void run() {
+		if (candidateLocations.size() == 0) {
+			timer.cancel();
+			client.notifyCompletion(new EventTaskReport(this));
+		} else {
+			if (dynamicLocation) {
+				location = EventUtil.getEventLocation(pattern,
+						candidateLocations, client.getCluster());
+				if (!reuseLocation) {
+					candidateLocations.remove(location);
+				}
+			}
 
-            logger.info(EventUtil.dateFormat.format(new Date()) + " " + "EVENT "
-                    + pattern.getEvent().getType().toUpperCase() + " at " + location.getId().toUpperCase());
-            try {
-                if (!scheduler.isDryRun()) {
-                    new EventExecutor().executeEvent(location, taskScript, taskArgs, EventUtil.getEvent(pattern)
-                            .isDaemon(), scheduler.getCluster(), pattern, scheduler.getErrorHandler());
-                }
-                occurrenceCount++;
-                if (occurrenceCount >= maxOccurs) {
-                    timer.cancel();
-                    scheduler.notifyCompletion(new EventTaskReport(this));
-                }
-            } catch (IOException ioe) {
-                timer.cancel();
-                scheduler.notifyCompletion(new EventTaskReport(this, false, ioe));
-            }
-        }
+			logger.info(EventUtil.dateFormat.format(new Date()) + " "
+					+ "EVENT " + pattern.getEvent().getType().toUpperCase()
+					+ " at " + location.getId().toUpperCase());
+			try {
+				if (!client.isDryRun()) {
+					new EventExecutor().executeEvent(location, taskScript,
+							taskArgs, event.isDaemon(), client.getCluster(),
+							pattern, client.getErrorHandler(), client);
+				}
+				occurrenceCount++;
+				if (occurrenceCount >= maxOccurs) {
+					timer.cancel();
+					client.notifyCompletion(new EventTaskReport(this));
+				}
+			} catch (IOException ioe) {
+				timer.cancel();
+				client.notifyCompletion(new EventTaskReport(this, false,
+						ioe));
+			}
+		}
 
-    }
+	}
 
-    public Node getLocation() {
-        return location;
-    }
+	public Node getLocation() {
+		return location;
+	}
 
-    public long getInterval() {
-        return interval;
-    }
+	public long getInterval() {
+		return interval;
+	}
 
-    public long getInitialDelay() {
-        return initialDelay;
-    }
+	public long getInitialDelay() {
+		return initialDelay;
+	}
 
-    public Pattern getPattern() {
-        return pattern;
-    }
+	public Pattern getPattern() {
+		return pattern;
+	}
 
-    public State getState() {
-        return state;
-    }
+	public State getState() {
+		return state;
+	}
 
 }
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
index 80bc0da..5f70c59 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
@@ -19,220 +19,235 @@
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.management.ValueType.Type;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
 import edu.uci.ics.asterix.event.schema.event.Event;
+import edu.uci.ics.asterix.event.schema.event.Events;
 import edu.uci.ics.asterix.event.schema.pattern.Pattern;
 
 public class EventUtil {
 
-    public static final String EVENTS_DIR = "events";
-    public static final String CLUSTER_CONF = "config/cluster.xml";
-    public static final String PATTERN_CONF = "config/pattern.xml";
-    private static final String IP_LOCATION = "IP_LOCATION";
-    private static final String CLUSTER_ENV = "ENV";
-    private static final String SCRIPT = "SCRIPT";
-    private static final String ARGS = "ARGS";
-    private static final String EXECUTE_SCRIPT = "events/execute.sh";
+	public static final String EVENTS_DIR = "events";
+	public static final String CLUSTER_CONF = "config/cluster.xml";
+	public static final String PATTERN_CONF = "config/pattern.xml";
+	public static final DateFormat dateFormat = new SimpleDateFormat(
+			"yyyy/MM/dd HH:mm:ss");
 
-    public static final DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+	private static final String IP_LOCATION = "IP_LOCATION";
+	private static final String CLUSTER_ENV = "ENV";
+	private static final String SCRIPT = "SCRIPT";
+	private static final String ARGS = "ARGS";
+	private static final String EXECUTE_SCRIPT = "events/execute.sh";
 
-    private static final Logger logger = Logger.getLogger(EventDriver.class.getName());
+	public static long parseTimeInterval(ValueType v, String unit)
+			throws IllegalArgumentException {
+		int val = 0;
+		switch (v.getType()) {
+		case ABS:
+			val = Integer.parseInt(v.getAbsoluteValue());
+			break;
+		case RANDOM_MIN_MAX:
+			val = Randomizer.getInstance().getRandomInt(v.getMin(), v.getMax());
+			break;
+		case RANDOM_RANGE:
+			String[] values = v.getRangeSet();
+			val = Integer.parseInt(values[Randomizer.getInstance()
+					.getRandomInt(0, values.length - 1)]);
+			break;
+		}
+		return computeInterval(val, unit);
+	}
 
-    public static long parseTimeInterval(ValueType v, String unit) throws IllegalArgumentException {
-        int val = 0;
-        switch (v.getType()) {
-            case ABS:
-                val = Integer.parseInt(v.getAbsoluteValue());
-                break;
-            case RANDOM_MIN_MAX:
-                val = Randomizer.getInstance().getRandomInt(v.getMin(), v.getMax());
-                break;
-            case RANDOM_RANGE:
-                String[] values = v.getRangeSet();
-                val = Integer.parseInt(values[Randomizer.getInstance().getRandomInt(0, values.length - 1)]);
-                break;
-        }
-        return computeInterval(val, unit);
-    }
+	public static long parseTimeInterval(String v, String unit)
+			throws IllegalArgumentException {
+		int value = Integer.parseInt(v);
+		return computeInterval(value, unit);
+	}
 
-    public static long parseTimeInterval(String v, String unit) throws IllegalArgumentException {
-        int value = Integer.parseInt(v);
-        return computeInterval(value, unit);
-    }
+	private static long computeInterval(int val, String unit) {
+		int vmult = 1;
+		if ("hr".equalsIgnoreCase(unit)) {
+			vmult = 3600 * 1000;
+		} else if ("min".equalsIgnoreCase(unit)) {
+			vmult = 60 * 1000;
+		} else if ("sec".equalsIgnoreCase(unit)) {
+			vmult = 1000;
+		} else
+			throw new IllegalArgumentException(
+					" invalid unit value specified for frequency (hr,min,sec)");
+		return val * vmult;
 
-    private static long computeInterval(int val, String unit) {
-        int vmult = 1;
-        if ("hr".equalsIgnoreCase(unit)) {
-            vmult = 3600 * 1000;
-        } else if ("min".equalsIgnoreCase(unit)) {
-            vmult = 60 * 1000;
-        } else if ("sec".equalsIgnoreCase(unit)) {
-            vmult = 1000;
-        } else
-            throw new IllegalArgumentException(" invalid unit value specified for frequency (hr,min,sec)");
-        return val * vmult;
+	}
+
+	public static Event getEvent(Pattern pattern, Events events) {
+		for (Event event : events.getEvent()) {
+			if (event.getType().equals(pattern.getEvent().getType())) {
+				return event;
+			}
+		}
+		throw new IllegalArgumentException(" Unknown event type "
+				+ pattern.getEvent().getType());
+	}
+
+	public static Node getEventLocation(Pattern pattern,
+			List<Node> candidateLocations, Cluster cluster) {
+		ValueType value = new ValueType(pattern.getEvent().getNodeid()
+				.getValue());
+		Node location = null;
+		Type vtype = value.getType();
 
-    }
+		switch (vtype) {
+		case ABS:
+			location = getNodeFromId(value.getAbsoluteValue(), cluster);
+			break;
+		case RANDOM_RANGE:
+			int nodeIndex = Randomizer.getInstance().getRandomInt(0,
+					candidateLocations.size() - 1);
+			location = candidateLocations.get(nodeIndex);
+			break;
+		case RANDOM_MIN_MAX:
+			throw new IllegalStateException(
+					" Cannot configure a min max value range for location");
+		}
+		return location;
 
-    public static Event getEvent(Pattern pattern) {
-        for (Event event : EventDriver.getEvents().getEvent()) {
-            if (event.getType().equals(pattern.getEvent().getType())) {
-                return event;
-            }
-        }
-        throw new IllegalArgumentException(" Unknown event type" + pattern.getEvent().getType());
-    }
+	}
 
-    public static Node getEventLocation(Pattern pattern, List<Node> candidateLocations, Cluster cluster) {
-        ValueType value = new ValueType(pattern.getEvent().getNodeid().getValue());
-        Node location = null;
-        Type vtype = value.getType();
+	public static List<Node> getCandidateLocations(Pattern pattern,
+			Cluster cluster) {
+		ValueType value = new ValueType(pattern.getEvent().getNodeid()
+				.getValue());
+		List<Node> candidateList = new ArrayList<Node>();
+		switch (value.getType()) {
+		case ABS:
+			candidateList.add(getNodeFromId(value.getAbsoluteValue(), cluster));
+			break;
+		case RANDOM_RANGE:
+			boolean anyOption = false;
+			String[] values = value.getRangeSet();
+			for (String v : values) {
+				if (v.equalsIgnoreCase("ANY")) {
+					anyOption = true;
+				}
+			}
+			if (anyOption) {
+				for (Node node : cluster.getNode()) {
+					candidateList.add(node);
+				}
+			} else {
+				boolean found = false;
+				for (String v : values) {
+					for (Node node : cluster.getNode()) {
+						if (node.getId().equals(v)) {
+							candidateList.add(node);
+							found = true;
+							break;
+						}
+					}
+					if (!found) {
+						throw new IllegalStateException("Unknown nodeId : " + v);
+					}
+					found = false;
+				}
 
-        switch (vtype) {
-            case ABS:
-                location = getNodeFromId(value.getAbsoluteValue(), cluster);
-                break;
-            case RANDOM_RANGE:
-                int nodeIndex = Randomizer.getInstance().getRandomInt(0, candidateLocations.size() - 1);
-                location = candidateLocations.get(nodeIndex);
-                break;
-            case RANDOM_MIN_MAX:
-                throw new IllegalStateException(" Canont configure a min max value range for location");
-        }
-        return location;
+			}
+			String[] excluded = value.getRangeExcluded();
+			if (excluded != null && excluded.length > 0) {
+				List<Node> markedForRemoval = new ArrayList<Node>();
+				for (String exclusion : excluded) {
+					for (Node node : candidateList) {
+						if (node.getId().equals(exclusion)) {
+							markedForRemoval.add(node);
+						}
+					}
+				}
+				candidateList.removeAll(markedForRemoval);
+			}
+			break;
+		case RANDOM_MIN_MAX:
+			throw new IllegalStateException(
+					" Invalid value configured for location");
+		}
+		return candidateList;
+	}
 
-    }
+	private static Node getNodeFromId(String nodeid, Cluster cluster) {
+		if (nodeid.equals(EventDriver.CLIENT_NODE.getId())) {
+			return EventDriver.CLIENT_NODE;
+		}
 
-    public static List<Node> getCandidateLocations(Pattern pattern, Cluster cluster) {
-        ValueType value = new ValueType(pattern.getEvent().getNodeid().getValue());
-        List<Node> candidateList = new ArrayList<Node>();
-        switch (value.getType()) {
-            case ABS:
-                candidateList.add(getNodeFromId(value.getAbsoluteValue(), cluster));
-                break;
-            case RANDOM_RANGE:
-                boolean anyOption = false;
-                String[] values = value.getRangeSet();
-                for (String v : values) {
-                    if (v.equalsIgnoreCase("ANY")) {
-                        anyOption = true;
-                    }
-                }
-                if (anyOption) {
-                    for (Node node : cluster.getNode()) {
-                        candidateList.add(node);
-                    }
-                } else {
-                    boolean found = false;
-                    for (String v : values) {
-                        for (Node node : cluster.getNode()) {
-                            if (node.getId().equals(v)) {
-                                candidateList.add(node);
-                                found = true;
-                                break;
-                            }
-                        }
-                        if (!found) {
-                            throw new IllegalStateException("Unknonw nodeId : " + v);
-                        }
-                        found = false;
-                    }
+		if (nodeid.equals(cluster.getMasterNode().getId())) {
+			return new Node(cluster.getMasterNode().getId(), cluster
+					.getMasterNode().getIp(), null, null, null);
+		}
 
-                }
-                String[] excluded = value.getRangeExcluded();
-                if (excluded != null && excluded.length > 0) {
-                    List<Node> markedForRemoval = new ArrayList<Node>();
-                    for (String exclusion : excluded) {
-                        for (Node node : candidateList) {
-                            if (node.getId().equals(exclusion)) {
-                                markedForRemoval.add(node);
-                            }
-                        }
-                    }
-                    candidateList.removeAll(markedForRemoval);
-                }
-                break;
-            case RANDOM_MIN_MAX:
-                throw new IllegalStateException(" Invalid value configured for location");
-        }
-        return candidateList;
-    }
+		List<Node> nodeList = cluster.getNode();
+		for (Node node : nodeList) {
+			if (node.getId().equals(nodeid)) {
+				return node;
+			}
+		}
+		StringBuffer buffer = new StringBuffer();
+		buffer.append(EventDriver.CLIENT_NODE.getId() + ",");
+		buffer.append(cluster.getMasterNode().getId() + ",");
+		for (Node v : cluster.getNode()) {
+			buffer.append(v.getId() + ",");
+		}
+		buffer.deleteCharAt(buffer.length() - 1);
+		throw new IllegalArgumentException("Unknown node id :" + nodeid
+				+ " valid ids:" + buffer);
+	}
 
-    private static Node getNodeFromId(String nodeid, Cluster cluster) {
-        if (nodeid.equals(EventDriver.CLIENT_NODE.getId())) {
-            return EventDriver.CLIENT_NODE;
-        }
+	public static void executeEventScript(Node node, String script,
+			List<String> args, Cluster cluster) throws IOException,
+			InterruptedException {
+		List<String> pargs = new ArrayList<String>();
+		pargs.add("/bin/bash");
+		pargs.add(EventDriver.getEventsDir() + "/" + EXECUTE_SCRIPT);
+		StringBuffer argBuffer = new StringBuffer();
+		String env = EventDriver.getStringifiedEnv(cluster) + " " + IP_LOCATION
+				+ "=" + node.getIp();
+		if (args != null) {
+			for (String arg : args) {
+				argBuffer.append(arg + " ");
+			}
+		}
+		ProcessBuilder pb = new ProcessBuilder(pargs);
+		pb.environment().putAll(EventDriver.getEnvironment());
+		pb.environment().put(IP_LOCATION, node.getIp());
+		pb.environment().put(CLUSTER_ENV, env);
+		pb.environment().put(SCRIPT, script);
+		pb.environment().put(ARGS, argBuffer.toString());
+		pb.start();
+	}
 
-        if (nodeid.equals(cluster.getMasterNode().getId())) {
-            return new Node(cluster.getMasterNode().getId(), cluster.getMasterNode().getIp(), null, null, null);
-        }
+	public static void executeLocalScript(Node node, String script,
+			List<String> args) throws IOException, InterruptedException {
+		List<String> pargs = new ArrayList<String>();
+		pargs.add("/bin/bash");
+		pargs.add(script);
+		if (args != null) {
+			pargs.addAll(args);
+		}
+		ProcessBuilder pb = new ProcessBuilder(pargs);
+		pb.environment().putAll(EventDriver.getEnvironment());
+		pb.environment().put(IP_LOCATION, node.getIp());
+		pb.start();
+	}
 
-        List<Node> nodeList = cluster.getNode();
-        for (Node node : nodeList) {
-            if (node.getId().equals(nodeid)) {
-                return node;
-            }
-        }
-        StringBuffer buffer = new StringBuffer();
-        buffer.append(EventDriver.CLIENT_NODE.getId() + ",");
-        buffer.append(cluster.getMasterNode().getId() + ",");
-        for (Node v : cluster.getNode()) {
-            buffer.append(v.getId() + ",");
-        }
-        buffer.deleteCharAt(buffer.length() - 1);
-        throw new IllegalArgumentException("Unknown node id :" + nodeid + " valid ids:" + buffer);
-    }
-
-    public static void executeEventScript(Node node, String script, List<String> args, Cluster cluster)
-            throws IOException, InterruptedException {
-        List<String> pargs = new ArrayList<String>();
-        pargs.add("/bin/bash");
-        pargs.add(EventDriver.getHomeDir() + "/" + EXECUTE_SCRIPT);
-        StringBuffer argBuffer = new StringBuffer();
-        String env = EventDriver.getStringifiedEnv(cluster) + " " + IP_LOCATION + "=" + node.getIp();
-        if (args != null) {
-            for (String arg : args) {
-                argBuffer.append(arg + " ");
-            }
-        }
-        ProcessBuilder pb = new ProcessBuilder(pargs);
-        pb.environment().putAll(EventDriver.getEnvironment());
-        pb.environment().put(IP_LOCATION, node.getIp());
-        pb.environment().put(CLUSTER_ENV, env);
-        pb.environment().put(SCRIPT, script);
-        pb.environment().put(ARGS, argBuffer.toString());
-        pb.start();
-    }
-
-    public static void executeLocalScript(Node node, String script, List<String> args) throws IOException,
-            InterruptedException {
-        List<String> pargs = new ArrayList<String>();
-        pargs.add("/bin/bash");
-        pargs.add(script);
-        if (args != null) {
-            pargs.addAll(args);
-        }
-        ProcessBuilder pb = new ProcessBuilder(pargs);
-        pb.environment().putAll(EventDriver.getEnvironment());
-        pb.environment().put(IP_LOCATION, node.getIp());
-        pb.start();
-    }
-
-    public static List<String> getEventArgs(Pattern pattern) {
-        List<String> pargs = new ArrayList<String>();
-        if (pattern.getEvent().getPargs() == null) {
-            return pargs;
-        }
-        String[] args = pattern.getEvent().getPargs().split(" ");
-        for (String arg : args) {
-            pargs.add(arg.trim());
-        }
-        return pargs;
-    }
+	public static List<String> getEventArgs(Pattern pattern) {
+		List<String> pargs = new ArrayList<String>();
+		if (pattern.getEvent().getPargs() == null) {
+			return pargs;
+		}
+		String[] args = pattern.getEvent().getPargs().split(" ");
+		for (String arg : args) {
+			pargs.add(arg.trim());
+		}
+		return pargs;
+	}
 
 }
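
For readers tracing the period/delay handling in EventTask, a small worked example (not
part of the patch) of the interval arithmetic that parseTimeInterval and computeInterval
perform; the values are illustrative.

// Illustration only: computeInterval multiplies the parsed value by a unit factor
// ("hr" -> 3600 * 1000, "min" -> 60 * 1000, "sec" -> 1000) and returns milliseconds;
// any other unit string throws IllegalArgumentException.
import edu.uci.ics.asterix.event.management.EventUtil;

public class IntervalSketch {
    public static void main(String[] args) {
        System.out.println(EventUtil.parseTimeInterval("2", "min"));  // 120000
        System.out.println(EventUtil.parseTimeInterval("10", "sec")); // 10000
        System.out.println(EventUtil.parseTimeInterval("1", "hr"));   // 3600000
    }
}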
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java
index 3792eb8..4cd4b82 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java
@@ -15,15 +15,21 @@
 package edu.uci.ics.asterix.event.management;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.logging.Logger;
 
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+
 import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.event.Events;
 import edu.uci.ics.asterix.event.schema.pattern.Event;
 import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
 import edu.uci.ics.asterix.event.schema.pattern.Pattern;
@@ -32,146 +38,166 @@
 
 public class EventrixClient {
 
-    private static final Logger LOGGER = Logger.getLogger(EventrixClient.class.getName());
+	private static final Logger LOGGER = Logger.getLogger(EventrixClient.class
+			.getName());
 
-    private EventTask[] tasks;
-    private boolean dryRun = false;
-    private LinkedBlockingQueue<EventTaskReport> msgInbox = new LinkedBlockingQueue<EventTaskReport>();
-    private AtomicInteger pendingTasks = new AtomicInteger(0);
-    private final Cluster cluster;
-    private IPatternListener listener;
-    private IOutputHandler outputHandler;
+	private EventTask[] tasks;
+	private boolean dryRun = false;
+	private LinkedBlockingQueue<EventTaskReport> msgInbox = new LinkedBlockingQueue<EventTaskReport>();
+	private AtomicInteger pendingTasks = new AtomicInteger(0);
+	private final Cluster cluster;
+	private IPatternListener listener;
+	private IOutputHandler outputHandler;
+	private Events events;
+	private String eventsDir;
 
-    public EventrixClient(Cluster cluster, boolean dryRun, IOutputHandler outputHandler) throws Exception {
-        this.cluster = cluster;
-        this.dryRun = dryRun;
-        this.outputHandler = outputHandler;
-        if (!dryRun) {
-            initializeCluster();
-        }
-    }
+	public EventrixClient(String eventsDir, Cluster cluster, boolean dryRun,
+			IOutputHandler outputHandler) throws Exception {
+		this.eventsDir = eventsDir;
+		this.events = initializeEvents();
+		this.cluster = cluster;
+		this.dryRun = dryRun;
+		this.outputHandler = outputHandler;
+		if (!dryRun) {
+			initializeCluster(eventsDir);
+		}
+	}
 
-    public void submit(Patterns patterns) throws Exception {
-        initTasks(patterns);
-        try {
-            waitForCompletion();
-        } catch (InterruptedException ie) {
-            LOGGER.info("Interrupted exception :" + ie);
-        } catch (Exception e) {
-            throw e;
-        }
+	public void submit(Patterns patterns) throws Exception {
+		initTasks(patterns);
+		try {
+			waitForCompletion();
+		} catch (InterruptedException ie) {
+			LOGGER.info("Interrupted exception :" + ie);
+		} catch (Exception e) {
+			throw e;
+		}
 
-    }
+	}
 
-    public void submit(Patterns patterns, IPatternListener listener) throws Exception {
-        this.listener = listener;
-        initTasks(patterns);
-    }
+	public void submit(Patterns patterns, IPatternListener listener)
+			throws Exception {
+		this.listener = listener;
+		initTasks(patterns);
+	}
 
-    private void initTasks(Patterns patterns) {
-        tasks = new EventTask[patterns.getPattern().size()];
-        pendingTasks.set(tasks.length);
-        int index = 0;
-        for (Pattern pattern : patterns.getPattern()) {
-            tasks[index] = new EventTask(pattern, this);
-            tasks[index].start();
-            index++;
-        }
-    }
+	private void initTasks(Patterns patterns) {
+		tasks = new EventTask[patterns.getPattern().size()];
+		pendingTasks.set(tasks.length);
+		int index = 0;
+		for (Pattern pattern : patterns.getPattern()) {
+			tasks[index] = new EventTask(pattern, this);
+			tasks[index].start();
+			index++;
+		}
+	}
 
-    public Cluster getCluster() {
-        return cluster;
-    }
+	public Cluster getCluster() {
+		return cluster;
+	}
 
-    public boolean isDryRun() {
-        return dryRun;
-    }
+	public boolean isDryRun() {
+		return dryRun;
+	}
 
-    public void notifyCompletion(EventTaskReport report) {
+	public Events getEvents() {
+		return events;
+	}
 
-        if (report.isSuccess()) {
-            pendingTasks.decrementAndGet();
-            if (listener != null) {
-                listener.eventCompleted(report);
-                if (pendingTasks.get() == 0) {
-                    listener.jobCompleted();
-                }
-            } else {
-                try {
-                    msgInbox.put(report);
-                } catch (InterruptedException e) {
-                }
-            }
-        } else {
-            for (EventTask t : tasks) {
-                if (t.getState() == EventTask.State.INITIALIZED || t.getState() == EventTask.State.IN_PROGRESS) {
-                    t.cancel();
-                }
-            }
-            if (listener != null) {
-                listener.jobFailed(report);
-            } else {
-                try {
-                    msgInbox.put(report);
-                } catch (InterruptedException e) {
-                }
-            }
-        }
-    }
+	public String getEventsDir() {
+		return eventsDir;
+	}
 
-    public void notifyFailure(EventTaskReport report) {
+	public synchronized void notifyCompletion(EventTaskReport report) {
 
-    }
+		if (report.isSuccess()) {
+			if (listener != null) {
+				pendingTasks.decrementAndGet();
+				listener.eventCompleted(report);
+				if (pendingTasks.get() == 0) {
+					listener.jobCompleted();
+				}
+			} else {
+				try {
+					msgInbox.put(report);
+				} catch (InterruptedException e) {
+				}
+			}
+		} else {
+			for (EventTask t : tasks) {
+				if (t.getState() == EventTask.State.INITIALIZED
+						|| t.getState() == EventTask.State.IN_PROGRESS) {
+					t.cancel();
+				}
+			}
+			if (listener != null) {
+				listener.jobFailed(report);
+			} else {
+				try {
+					msgInbox.put(report);
+				} catch (InterruptedException e) {
+				}
+			}
+		}
+	}
 
-    private void waitForCompletion() throws Exception {
-        while (true) {
-            EventTaskReport report = msgInbox.take();
-            if (report.isSuccess()) {
-                if (pendingTasks.get() == 0) {
-                    break;
-                }
-            } else {
-                throw new RuntimeException(report.getException().getMessage());
-            }
-        }
-    }
+	private void waitForCompletion() throws Exception {
+		while (true) {
+			EventTaskReport report = msgInbox.take();
+			if (report.isSuccess()) {
+				if (pendingTasks.decrementAndGet() == 0) {
+					break;
+				}
+			} else {
+				throw new RuntimeException(report.getException().getMessage());
+			}
+		}
+	}
 
-    private void initializeCluster() throws Exception {
-        Patterns patterns = initPattern();
-        submit(patterns);
-    }
+	private void initializeCluster(String eventsDir) throws Exception {
+		Patterns patterns = initPattern(eventsDir);
+		submit(patterns);
+	}
 
-    private Patterns initPattern() {
-        Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
-        List<Pattern> patternList = new ArrayList<Pattern>();
-        String workingDir = cluster.getWorkingDir().getDir();
-        File eventsPkg = new File(EventDriver.getHomeDir() + File.separator + "events.pkg.tar");
-        if (!eventsPkg.exists()) {
-            throw new IllegalStateException(" Events package does not exist (" + eventsPkg.getAbsolutePath() + ")");
-        }
+	private Patterns initPattern(String eventsDir) {
+		Nodeid nodeid = new Nodeid(new Value(null,
+				EventDriver.CLIENT_NODE.getId()));
+		List<Pattern> patternList = new ArrayList<Pattern>();
+		String workingDir = cluster.getWorkingDir().getDir();
+		String username = cluster.getUsername() == null ? System
+				.getProperty("user.name") : cluster.getUsername();
+		patternList.add(getDirectoryTransferPattern(username, eventsDir,
+				nodeid, cluster.getMasterNode().getIp(), workingDir));
 
-        String username = cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername();
-        patternList.add(getFileTransferPattern(username, eventsPkg.getAbsolutePath(), nodeid, cluster.getMasterNode()
-                .getIp(), workingDir, true));
+		if (!cluster.getWorkingDir().isNFS()) {
+			for (Node node : cluster.getNode()) {
+				patternList.add(getDirectoryTransferPattern(username,
+						eventsDir, nodeid, node.getIp(), workingDir));
+			}
+		}
+		Patterns patterns = new Patterns(patternList);
+		return patterns;
+	}
 
-        if (!cluster.getWorkingDir().isNFS()) {
-            for (Node node : cluster.getNode()) {
-                patternList.add(getFileTransferPattern(username, eventsPkg.getAbsolutePath(), nodeid, node.getIp(),
-                        workingDir, true));
-            }
-        }
-        Patterns patterns = new Patterns(patternList);
-        return patterns;
-    }
+	private Pattern getDirectoryTransferPattern(String username, String src,
+			Nodeid srcNode, String destNodeIp, String destDir) {
+		String pargs = username + " " + src + " " + destNodeIp + " " + destDir;
+		Event event = new Event("directory_transfer", srcNode, pargs);
+		return new Pattern(null, 1, null, event);
+	}
 
-    private Pattern getFileTransferPattern(String username, String src, Nodeid srcNode, String destNodeIp,
-            String destDir, boolean unpack) {
-        String pargs = username + " " + src + " " + destNodeIp + " " + destDir + " " + "unpack";
-        Event event = new Event("file_transfer", srcNode, pargs);
-        return new Pattern(null, 1, null, event);
-    }
+	public IOutputHandler getErrorHandler() {
+		return outputHandler;
+	}
 
-    public IOutputHandler getErrorHandler() {
-        return outputHandler;
-    }
+	private Events initializeEvents() throws JAXBException,
+			FileNotFoundException {
+		File file = new File(eventsDir + File.separator + "events"
+				+ File.separator + "events.xml");
+		JAXBContext eventCtx = JAXBContext.newInstance(Events.class);
+		Unmarshaller unmarshaller = eventCtx.createUnmarshaller();
+		events = (Events) unmarshaller.unmarshal(file);
+		return events;
+	}
+
 }
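
The bootstrap now ships the events directory via the new directory_transfer event instead
of an events.pkg tarball via file_transfer. A minimal sketch (not part of the patch) of how
the pargs built in getDirectoryTransferPattern map onto the positional arguments of
file/dir_transfer.sh added later in this patch; all values are placeholders, and the Value
import is assumed to sit beside Nodeid in the pattern schema package.

// Sketch only: the pargs string is consumed positionally by dir_transfer.sh ($1..$4).
import edu.uci.ics.asterix.event.driver.EventDriver;
import edu.uci.ics.asterix.event.schema.pattern.Event;
import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
import edu.uci.ics.asterix.event.schema.pattern.Pattern;
import edu.uci.ics.asterix.event.schema.pattern.Value;

public class DirTransferSketch {
    public static Pattern bootstrapTransfer() {
        Nodeid srcNode = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
        String username = "joe";                 // $1 USERNAME
        String eventsDir = "/home/joe/events";   // $2 DIR_TO_TRANSFER
        String destNodeIp = "10.0.0.2";          // $3 DEST_HOST
        String workingDir = "/tmp/asterix";      // $4 DEST_DIR
        String pargs = username + " " + eventsDir + " " + destNodeIp + " " + workingDir;
        // Mirrors getDirectoryTransferPattern: a one-shot directory_transfer pattern.
        return new Pattern(null, 1, null, new Event("directory_transfer", srcNode, pargs));
    }
}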
diff --git a/asterix-events/src/main/resources/events/asterix_deploy/asterix_deploy.sh b/asterix-events/src/main/resources/events/asterix_deploy/asterix_deploy.sh
new file mode 100755
index 0000000..532e559
--- /dev/null
+++ b/asterix-events/src/main/resources/events/asterix_deploy/asterix_deploy.sh
@@ -0,0 +1,11 @@
+MANAGIX_HOME=$1
+HYRACKS_CLI=$MANAGIX_HOME/asterix/hyracks-cli/bin/hyrackscli
+if ! [ -x $HYRACKS_CLI ]
+then
+     chmod +x $HYRACKS_CLI
+fi
+ASTERIX_ZIP=$2
+HOST=$3
+echo "connect to \"$HOST\";" > temp
+echo "create application asterix \"$ASTERIX_ZIP\";" >> temp 
+($HYRACKS_CLI < temp)  
diff --git a/asterix-events/src/main/resources/events/backup/backup.sh b/asterix-events/src/main/resources/events/backup/backup.sh
new file mode 100755
index 0000000..cff37df
--- /dev/null
+++ b/asterix-events/src/main/resources/events/backup/backup.sh
@@ -0,0 +1,19 @@
+WORKING_DIR=$1
+ASTERIX_INSTANCE_NAME=$2
+ASTERIX_DATA_DIR=$3
+BACKUP_ID=$4
+HDFS_URL=$5
+HADOOP_VERSION=$6
+HDFS_BACKUP_DIR=$7
+NODE_ID=$8
+
+export HADOOP_HOME=$WORKING_DIR/hadoop-$HADOOP_VERSION
+
+nodeStores=$(echo $ASTERIX_DATA_DIR | tr "," "\n")
+for nodeStore in $nodeStores
+do
+  NODE_BACKUP_DIR=$HDFS_BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID/$nodeStore
+  $HADOOP_HOME/bin/hadoop fs -mkdir $HDFS_URL/$NODE_BACKUP_DIR
+  echo "$HADOOP_HOME/bin/hadoop fs -copyFromLocal $nodeStore/$NODE_ID $HDFS_URL/$NODE_BACKUP_DIR" >> ~/backup.log
+  $HADOOP_HOME/bin/hadoop fs -copyFromLocal $nodeStore/$NODE_ID/$ASTERIX_INSTANCE_NAME/* $HDFS_URL/$NODE_BACKUP_DIR/
+done
diff --git a/asterix-events/src/main/resources/events/cc_failure/cc_failure.sh b/asterix-events/src/main/resources/events/cc_failure/cc_failure.sh
new file mode 100755
index 0000000..0855f5f
--- /dev/null
+++ b/asterix-events/src/main/resources/events/cc_failure/cc_failure.sh
@@ -0,0 +1,6 @@
+#kill -9 `ps -ef  | grep hyracks | grep -v grep | cut -d "/" -f1 | tr -s " " | cut -d " " -f2`
+CC_PARENT_ID_INFO=`ps -ef  | grep hyracks | grep cc_start | grep -v ssh`
+CC_PARENT_ID=`echo $CC_PARENT_ID_INFO | tr -s " " | cut -d " " -f2`
+CC_ID_INFO=`ps -ef | grep hyracks | grep $CC_PARENT_ID | grep -v bash`
+CC_ID=`echo $CC_ID_INFO |  tr -s " " | cut -d " " -f2`
+kill -9 $CC_ID
diff --git a/asterix-events/src/main/resources/events/cc_start/cc_start.sh b/asterix-events/src/main/resources/events/cc_start/cc_start.sh
new file mode 100755
index 0000000..e0b29e0
--- /dev/null
+++ b/asterix-events/src/main/resources/events/cc_start/cc_start.sh
@@ -0,0 +1,9 @@
+if [ ! -d $LOG_DIR ]; 
+then 
+  mkdir -p $LOG_DIR
+fi
+if [ ! -z $1 ];
+then
+  JAVA_OPTS="$JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$1,server=y,suspend=n"
+fi  
+$HYRACKS_HOME/bin/hyrackscc -client-net-ip-address $CLIENT_NET_IP -client-net-port 1098 -cluster-net-ip-address $CLUSTER_NET_IP -cluster-net-port 1099 -http-port 8888  &> $LOG_DIR/cc.log
diff --git a/asterix-events/src/main/resources/events/events.xml b/asterix-events/src/main/resources/events/events.xml
new file mode 100644
index 0000000..857ee5a
--- /dev/null
+++ b/asterix-events/src/main/resources/events/events.xml
@@ -0,0 +1,100 @@
+<events xmlns="events">
+  <event>
+    <type>node_join</type>
+    <script>node_join/nc_join.sh</script>
+    <description>Creates a NodeController process at a specified location.</description>
+    <args>location_of_cc(hostname/ip_address) node_controller_id</args>
+    <daemon>true</daemon>
+  </event>
+  <event>
+    <type>node_failure</type>
+    <script>node_failure/nc_failure.sh</script>
+    <description>Kills a NodeController process at a specified location.</description>
+    <args>node_controller_id</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>cc_start</type>
+    <script>cc_start/cc_start.sh</script>
+    <description>Starts a ClusterController process at a specified location.</description>
+    <args></args>
+    <daemon>true</daemon>
+  </event>
+  <event>
+    <type>cc_failure</type>
+    <script>cc_failure/cc_failure.sh</script>
+    <description>Kills the Cluster Controller process running at a specified location.</description>
+    <args></args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>node_restart</type>
+    <script>node_restart/nc_restart.sh</script>
+    <description>Shuts down and restarts a NodeController process after a specified time interval, at a specified location.</description>
+    <args>address of cc, node controller id and sleep interval (seconds)</args>
+    <daemon>true</daemon>
+  </event>
+  <event>
+    <type>asterix_deploy</type>
+    <script>asterix_deploy/asterix_deploy.sh</script>
+    <description>Deploys Asterix application on a cluster running hyracks</description>
+    <args>IP address of the node running the hyracks cluster controller</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>zookeeper_start</type>
+    <script>zookeeper/start.sh</script>
+    <description>Launches ZooKeeper server process</description>
+    <args>IP address of the ZooKeeper server</args>
+    <daemon>true</daemon>
+  </event>
+  <event>
+    <type>zookeeper_stop</type>
+    <script>zookeeper/stop.sh</script>
+    <description>Terminates ZooKeeper server process</description>
+    <args>IP address of the ZooKeeper server</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>file_transfer</type>
+    <script>file/transfer.sh</script>
+    <description>Copies a file on the local file system to a remote node</description>
+    <args>local_source_path destination_node destination_path</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>directory_transfer</type>
+    <script>file/dir_transfer.sh</script>
+    <description>Copies a directory (and its contents) on the local file system to a remote node</description>
+    <args>local_source_path destination_node destination_path</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>file_delete</type>
+    <script>file/delete.sh</script>
+    <description>Deletes a file on the local file system of a remote node</description>
+    <args>local_source_path destination_node destination_path</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>backup</type>
+    <script>backup/backup.sh</script>
+    <description>Takes a backup of an Asterix instance</description>
+    <args>Asterix_data_dir HDFSurl</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>restore</type>
+    <script>restore/restore.sh</script>
+    <description>Restores an Asterix instance from a backup</description>
+    <args>Asterix_data_dir HDFSurl</args>
+    <daemon>false</daemon>
+  </event>
+  <event>
+    <type>hdfs_delete</type>
+    <script>hdfs/delete.sh</script>
+    <description>Deletes an HDFS path</description>
+    <args>WorkingDir HadoopVersion HDFSUrl Path_to_Delete</args>
+    <daemon>false</daemon>
+  </event>
+</events>
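
Not part of the patch: a minimal sketch of how these entries are consumed on the Java side.
EventrixClient.initializeEvents unmarshals this file, EventUtil.getEvent matches a pattern's
event type against it, and EventTask resolves the script path relative to the events
directory (shown here for the client-node case).

// Sketch only: resolving an events.xml entry to a runnable script, as EventTask does
// for an event executed on the client node.
import java.io.File;

import edu.uci.ics.asterix.event.management.EventUtil;
import edu.uci.ics.asterix.event.management.EventrixClient;
import edu.uci.ics.asterix.event.schema.event.Event;
import edu.uci.ics.asterix.event.schema.pattern.Pattern;

public class EventLookupSketch {
    public static String scriptFor(Pattern pattern, EventrixClient client) {
        // Match pattern.getEvent().getType() (e.g. "cc_start") against the <type>
        // elements of the events.xml loaded by the client.
        Event event = EventUtil.getEvent(pattern, client.getEvents());
        String scriptsDir = client.getEventsDir() + File.separator + "events";
        return scriptsDir + File.separator + event.getScript(); // .../events/cc_start/cc_start.sh
    }
}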
diff --git a/asterix-events/src/main/resources/events/file/delete.sh b/asterix-events/src/main/resources/events/file/delete.sh
new file mode 100755
index 0000000..d5ac3ff
--- /dev/null
+++ b/asterix-events/src/main/resources/events/file/delete.sh
@@ -0,0 +1,3 @@
+PATH_TO_DELETE=$1
+echo "rm -rf $PATH_TO_DELETE" >> ~/backup.log
+rm -rf "$PATH_TO_DELETE"
diff --git a/asterix-events/src/main/resources/events/file/dir_transfer.sh b/asterix-events/src/main/resources/events/file/dir_transfer.sh
new file mode 100755
index 0000000..af7da70
--- /dev/null
+++ b/asterix-events/src/main/resources/events/file/dir_transfer.sh
@@ -0,0 +1,7 @@
+USERNAME=$1
+DIR_TO_TRANSFER=$2
+DEST_HOST=$3
+DEST_DIR=$4
+ssh -l $USERNAME $DEST_HOST "mkdir -p $DEST_DIR"
+echo "scp -r $DIR_TO_TRANSFER $USERNAME@$DEST_HOST:$DEST_DIR/" 
+scp -r $DIR_TO_TRANSFER $USERNAME@$DEST_HOST:$DEST_DIR/
diff --git a/asterix-events/src/main/resources/events/file/transfer.sh b/asterix-events/src/main/resources/events/file/transfer.sh
new file mode 100755
index 0000000..08f0b43
--- /dev/null
+++ b/asterix-events/src/main/resources/events/file/transfer.sh
@@ -0,0 +1,23 @@
+USERNAME=$1
+FILE_TO_TRANSFER=$2
+DEST_HOST=$3
+DEST_DIR=$4
+POST_ACTION=$5
+ssh -l $USERNAME $DEST_HOST "mkdir -p $DEST_DIR"
+echo "scp $FILE_TO_TRANSFER $USERNAME@$DEST_HOST:$DEST_DIR/" 
+scp $FILE_TO_TRANSFER $USERNAME@$DEST_HOST:$DEST_DIR/
+if [ "$POST_ACTION" = "unpack" ]
+ then
+ filename=`echo ${FILE_TO_TRANSFER##*/}`
+ fileType=`echo ${FILE_TO_TRANSFER##*.}`
+ if [ "$fileType" = "tar" ]
+ then
+   echo "ssh -l $USERNAME $DEST_HOST cd $DEST_DIR && tar xf $filename"
+   ssh -l $USERNAME $DEST_HOST "cd $DEST_DIR && tar xf $filename"
+ else if [ "$fileType" = "zip" ]
+   then
+     echo "ssh -l $USERNAME $DEST_HOST unzip -o -q -d $DEST_DIR $DEST_DIR/$filename"
+     ssh -l $USERNAME $DEST_HOST "unzip -o -q -d $DEST_DIR $DEST_DIR/$filename"
+   fi
+ fi
+fi
diff --git a/asterix-events/src/main/resources/events/hdfs/delete.sh b/asterix-events/src/main/resources/events/hdfs/delete.sh
new file mode 100755
index 0000000..6ff54ee
--- /dev/null
+++ b/asterix-events/src/main/resources/events/hdfs/delete.sh
@@ -0,0 +1,7 @@
+WORKING_DIR=$1
+HADOOP_VERSION=$2
+HDFS_URL=$3
+HDFS_PATH=$4
+export HADOOP_HOME=$WORKING_DIR/hadoop-$HADOOP_VERSION
+echo "$HADOOP_HOME/bin/hadoop fs -rmr $HDFS_URL/$HDFS_PATH"
+$HADOOP_HOME/bin/hadoop fs -rmr $HDFS_URL/$HDFS_PATH
diff --git a/asterix-events/src/main/resources/events/node_failure/nc_failure.sh b/asterix-events/src/main/resources/events/node_failure/nc_failure.sh
new file mode 100755
index 0000000..b853be1
--- /dev/null
+++ b/asterix-events/src/main/resources/events/node_failure/nc_failure.sh
@@ -0,0 +1,19 @@
+NC_ID=$1
+
+#if [ $NC_ID == 'ANY' ]
+#then
+#  NC_ID="." 
+#fi 
+#
+#USER=`who am i | tr -s " " | cut -d " " -f1`
+#PARENT_ID=`ps -ef  | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | grep $NC_ID | cut -d " " -f2 | head -n 1` 
+#PID=`ps -ef | tr -s " " | grep hyracks | grep -v grep | grep -v nc_join |  grep $PARENT_ID | cut -d " " -f2 | head -n 1`
+#kill -9 $PID
+#
+
+
+INFO=`ps -ef | grep nc_join | grep -v grep | grep -v ssh| grep $NC_ID | head -n 1`
+PARENT_ID=`echo  $INFO | cut -d " "  -f2`
+PID_INFO=`ps -ef |  grep hyracks | grep -v grep | grep -v nc_join |  grep $PARENT_ID`
+PID=`echo $PID_INFO | cut -d " " -f2`
+kill -9 $PID
diff --git a/asterix-events/src/main/resources/events/node_join/nc_join.sh b/asterix-events/src/main/resources/events/node_join/nc_join.sh
new file mode 100755
index 0000000..62d71ce
--- /dev/null
+++ b/asterix-events/src/main/resources/events/node_join/nc_join.sh
@@ -0,0 +1,11 @@
+CC_HOST=$1
+NC_ID=$2
+if [ ! -d $LOG_DIR ]; 
+then 
+  mkdir -p $LOG_DIR
+fi
+if [ ! -z $3 ];
+then
+  JAVA_OPTS="$JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$3,server=y,suspend=n"
+fi  
+$HYRACKS_HOME/bin/hyracksnc -node-id $NC_ID -cc-host $CC_HOST -cc-port 1099 -cluster-net-ip-address $IP_LOCATION  -data-ip-address $IP_LOCATION &> $LOG_DIR/${NC_ID}.log
diff --git a/asterix-events/src/main/resources/events/node_restart/nc_restart.sh b/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
new file mode 100755
index 0000000..961ce8d
--- /dev/null
+++ b/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
@@ -0,0 +1,21 @@
+CC_HOST=$1
+NC_ID=$2
+SLEEP_TIME=$3
+
+if [ $NC_ID == 'ANY' ]
+then
+  NC_ID="." 
+  PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh |  cut -d " " -f2 | head -n 1` 
+  PARENT_PROCESS_ENTRY=`ps -ef | grep $PARENT_ID | grep -v grep   | head -n 1`
+  NC_ID=`echo ${PARENT_PROCESS_ENTRY##* }`
+  echo "NCid is $NC_ID" >> ~/try.txt
+else 
+  PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | grep $NC_ID | cut -d " " -f2 | head -n 1` 
+fi 
+
+PID=`ps -ej | tr -s " " | grep hyracks | grep -v grep | grep -v nc_join |  grep $PARENT_ID | cut -d " " -f2 | head -n 1`
+kill -9 $PID
+
+sleep $SLEEP_TIME
+
+$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.2.2-SNAPSHOT-binary-assembly/bin/hyracksnc -node-id $NC_ID -cc-host $CC_HOST -cc-port 1099 -cluster-net-ip-address $IP_LOCATION  -data-ip-address $IP_LOCATION
diff --git a/asterix-events/src/main/resources/events/restore/restore.sh b/asterix-events/src/main/resources/events/restore/restore.sh
new file mode 100755
index 0000000..96f3db4
--- /dev/null
+++ b/asterix-events/src/main/resources/events/restore/restore.sh
@@ -0,0 +1,25 @@
+WORKING_DIR=$1
+ASTERIX_INSTANCE_NAME=$2
+ASTERIX_DATA_DIR=$3
+BACKUP_ID=$4
+HDFS_URL=$5
+HADOOP_VERSION=$6
+HDFS_BACKUP_DIR=$7
+NODE_ID=$8
+
+export HADOOP_HOME=$WORKING_DIR/hadoop-$HADOOP_VERSION
+
+nodeStores=$(echo $ASTERIX_DATA_DIR | tr "," "\n")
+for nodeStore in $nodeStores
+do
+  NODE_BACKUP_DIR=$HDFS_BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID/$nodeStore
+  DEST_DIR=$nodeStore/$NODE_ID/$ASTERIX_INSTANCE_NAME
+  if [ ! -d $DEST_DIR ]
+  then 
+    mkdir -p $DEST_DIR
+  else 
+    rm -rf $DEST_DIR/*
+  fi
+  echo "$HADOOP_HOME/bin/hadoop fs -copyToLocal $HDFS_URL/$NODE_BACKUP_DIR/  $DEST_DIR/" >> ~/restore.log 
+  $HADOOP_HOME/bin/hadoop fs -copyToLocal $HDFS_URL/$NODE_BACKUP_DIR/*  $DEST_DIR/ 
+done
diff --git a/asterix-events/src/main/resources/scripts/execute.sh b/asterix-events/src/main/resources/scripts/execute.sh
index 114a12a..72234c1 100755
--- a/asterix-events/src/main/resources/scripts/execute.sh
+++ b/asterix-events/src/main/resources/scripts/execute.sh
@@ -1,16 +1,22 @@
-#!/bin/bash
-val=0
-line=""
-for x in $@
-do
- if [[ $val == 0 ]]
- then
-    line="$x="
-    val=1
- else
-    msg="$line$x"
-    echo $line >> envr
-    val=0
- fi
-done
-cat ./envr
+USERNAME=$1
+if [ "$DAEMON" = "false" ]; then
+  if [ -z "$USERNAME" ]
+  then
+    cmd_output=$(ssh $IP_LOCATION "$ENV $SCRIPT $ARGS" 2>&1 >/dev/null) 
+    echo "ssh $IP_LOCATION $ENV $SCRIPT $ARGS" >> ./execute.log
+    echo "$cmd_output"
+  else
+    echo "ssh -l $USERNAME $IP_LOCATION $ENV $SCRIPT $ARGS" >> ./execute.log
+    cmd_output=$(ssh -l $USERNAME $IP_LOCATION "$ENV $SCRIPT $ARGS" 2>&1 >/dev/null) 
+    echo "$cmd_output"
+  fi  
+else 
+  if [ -z "$USERNAME" ];
+  then
+     echo "ssh $IP_LOCATION $ENV $SCRIPT $ARGS &" >> ./execute.log
+     ssh $IP_LOCATION "$ENV $SCRIPT $ARGS" &
+  else
+     echo "ssh -l $USERNAME $IP_LOCATION $ENV $SCRIPT $ARGS &" >> ./execute.log
+     ssh -l $USERNAME $IP_LOCATION "$ENV $SCRIPT $ARGS" &
+  fi   
+fi
diff --git a/asterix-installer/pom.xml b/asterix-installer/pom.xml
index 98ca7a2..f4a1f9b 100644
--- a/asterix-installer/pom.xml
+++ b/asterix-installer/pom.xml
@@ -60,6 +60,21 @@
             </execution>
           </executions>
        </plugin>
+       <plugin>
+         <artifactId>maven-assembly-plugin</artifactId>
+         <version>2.2-beta-2</version>
+         <executions>
+           <execution>
+             <configuration>
+               <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+             </configuration>
+             <phase>package</phase>
+             <goals>
+               <goal>attached</goal>
+             </goals>
+           </execution>
+         </executions>
+       </plugin>
     </plugins> 
   </build>
 
diff --git a/asterix-installer/src/main/assembly/binary-assembly.xml b/asterix-installer/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..9788d2c
--- /dev/null
+++ b/asterix-installer/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,125 @@
+<assembly>
+  <id>binary-assembly</id>
+  <formats>
+    <format>zip</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>src/main/resources/conf</directory>
+      <outputDirectory>conf</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/clusters</directory>
+      <outputDirectory>clusters</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/zookeeper</directory>
+      <fileMode>0755</fileMode>
+      <outputDirectory>.managix/zookeeper/bin</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/scripts</directory>
+      <fileMode>0755</fileMode>
+      <includes>
+        <include>managix</include>
+      </includes>
+      <outputDirectory>bin</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/scripts</directory>
+      <fileMode>0755</fileMode>
+      <excludes>
+        <exclude>managix</exclude>
+      </excludes>
+      <outputDirectory>.managix/scripts</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/hadoop-0.20.2</directory>
+      <outputDirectory>.managix/hadoop-0.20.2</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>lib</outputDirectory>
+      <includes>
+        <include>*.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>../asterix-events/src/main/resources/events</directory>
+      <outputDirectory>.managix/eventrix/events</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>../asterix-events/src/main/resources/scripts</directory>
+      <outputDirectory>.managix/eventrix/scripts</outputDirectory>
+    </fileSet>
+  </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <includes>
+         <include>log4j:log4j</include>
+         <include>commons-lang:commons-lang</include>
+         <include>edu.uci.ics.asterix:asterix-events</include>
+         <include>org.apache.zookeeper:zookeeper</include>
+         <include>args4j:args4j</include>
+         <include>log4j:log4j</include>
+         <include>commons-io:commons-io</include>
+         <include>org.slf4j:slf4j-api</include>
+         <include>org.slf4j:slf4j-log4j12</include>
+       </includes>
+       <unpack>false</unpack>
+       <outputDirectory>lib</outputDirectory>
+    </dependencySet>
+    <dependencySet>
+      <includes>
+        <include>org.apache.hadoop:hadoop-core</include>
+        <include>commons-cli:commons-cli</include>
+        <include>commons-logging:commons-logging</include>
+      </includes>
+      <unpack>false</unpack>
+      <outputDirectory>.managix/hadoop-0.20.2/lib</outputDirectory>
+    </dependencySet>
+    <dependencySet>
+      <includes>
+        <include>org.apache.zookeeper:zookeeper</include>
+        <include>log4j:log4j</include>
+        <include>org.slf4j:slf4j-api</include>
+      </includes>
+      <unpack>false</unpack>
+      <outputDirectory>.managix/zookeeper/lib</outputDirectory>
+    </dependencySet>
+    <dependencySet>
+      <outputDirectory>asterix</outputDirectory>
+      <includes>
+        <include>hyracks-server*</include>
+      </includes>
+      <unpack>false</unpack>
+      <useTransitiveDependencies>false</useTransitiveDependencies>
+    </dependencySet>
+    <dependencySet>
+      <outputDirectory>asterix</outputDirectory>
+      <includes>
+        <include>hyracks-cli*</include>
+      </includes>
+      <unpack>false</unpack>
+      <useTransitiveDependencies>false</useTransitiveDependencies>
+    </dependencySet>
+    <dependencySet>
+      <outputDirectory>asterix</outputDirectory>
+      <includes>
+        <include>asterix-app*</include>
+      </includes>
+      <useTransitiveDependencies>false</useTransitiveDependencies>
+    </dependencySet>
+    <dependencySet>
+      <outputDirectory>.managix/eventrix</outputDirectory>
+      <includes>
+        <include>asterix-events*</include>
+      </includes>
+      <unpack>false</unpack>
+      <useTransitiveDependencies>false</useTransitiveDependencies>
+    </dependencySet>
+  </dependencySets>
+</assembly>
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AbstractCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AbstractCommand.java
index ed2e2ff..2e199fb 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AbstractCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AbstractCommand.java
@@ -23,6 +23,8 @@
     protected static final Logger LOGGER = Logger.getLogger(AbstractCommand.class.getName());
 
     protected CommandConfig config;
+    
+    protected String usageDescription;
 
     public void execute(String[] args) throws Exception {
         String[] cmdArgs = new String[args.length - 1];
@@ -35,5 +37,7 @@
 
     abstract protected void execCommand() throws Exception;
 
+    abstract protected String getUsageDescription();
+
     abstract protected CommandConfig getCommandConfig();
 }
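(All of the concrete commands below currently satisfy the new getUsageDescription() hook with auto-generated TODO stubs returning null. A hedged sketch of what one eventual override might look like, e.g. inside StopCommand; the wording is illustrative only and not taken from this checkpoint:

    @Override
    protected String getUsageDescription() {
        // Illustrative text only; real help strings are not part of this checkpoint.
        return "\nstop -n <name of the Asterix instance>"
                + "\n\nShuts down the Node Controllers and the Cluster Controller of a running instance.";
    }
)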
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
index 6dee399..f5450ff 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
@@ -45,6 +45,12 @@
         return new AlterConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class AlterConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
index 9a0f7c7..f8dcc80 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
@@ -19,14 +19,12 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.BackupInfo;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
+import edu.uci.ics.asterix.installer.model.BackupInfo;
 import edu.uci.ics.asterix.installer.service.ServiceProvider;
 
 public class BackupCommand extends AbstractCommand {
@@ -38,7 +36,7 @@
         List<BackupInfo> backupInfo = instance.getBackupInfo();
         PatternCreator pc = new PatternCreator();
         Patterns patterns = pc.getBackUpAsterixPattern(instance, ((BackupConfig) config).localPath);
-        EventDriver.getClient(instance.getCluster(), false, OutputHandler.INSTANCE).submit(patterns);
+        ManagixUtil.getEventrixClient(instance.getCluster()).submit(patterns);
         int backupId = backupInfo.size();
         BackupInfo binfo = new BackupInfo(backupId, new Date());
         backupInfo.add(binfo);
@@ -51,6 +49,12 @@
         return new BackupConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class BackupConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
index 341baa8..5a3da5b 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
@@ -23,14 +23,12 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
 import edu.uci.ics.asterix.event.schema.cluster.Env;
 import edu.uci.ics.asterix.event.schema.cluster.Property;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixDriver;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.error.VerificationUtil;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
@@ -65,7 +63,7 @@
 
         PatternCreator pc = new PatternCreator();
         Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, cluster);
-        EventDriver.getClient(cluster, false, OutputHandler.INSTANCE).submit(patterns);
+        ManagixUtil.getEventrixClient(cluster).submit(patterns);
 
         AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(asterixInstance);
         VerificationUtil.updateInstanceWithRuntimeDescription(asterixInstance, runtimeState, true);
@@ -88,6 +86,12 @@
     public String getAsterixInstanceName() {
         return asterixInstanceName;
     }
+
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
 }
 
 class CreateConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
index 3286575..60ce4c1 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
@@ -18,10 +18,8 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
@@ -35,7 +33,7 @@
         AsterixInstance instance = ManagixUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
         PatternCreator pc = new PatternCreator();
         Patterns patterns = pc.createDeleteInstancePattern(instance);
-        EventDriver.getClient(instance.getCluster(), false, OutputHandler.INSTANCE).submit(patterns);
+        ManagixUtil.getEventrixClient(instance.getCluster()).submit(patterns);
         ServiceProvider.INSTANCE.getLookupService().removeAsterixInstance(asterixInstanceName);
         LOGGER.log(Level.INFO, " Asterix instance: " + asterixInstanceName + " deleted");
     }
@@ -45,6 +43,12 @@
         return new DeleteConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class DeleteConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
index c2713a3..6b6536a 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
@@ -22,8 +22,8 @@
 import edu.uci.ics.asterix.installer.error.ManagixException;
 import edu.uci.ics.asterix.installer.error.VerificationUtil;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
+import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.installer.service.ServiceProvider;
 
 public class DescribeCommand extends AbstractCommand {
@@ -70,6 +70,12 @@
         return new DescribeConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class DescribeConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
index 141fc03..620c88f 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
@@ -16,10 +16,8 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
@@ -36,7 +34,7 @@
         }
         PatternCreator pc = new PatternCreator();
         Patterns patterns = pc.getRestoreAsterixPattern(instance, backupId);
-        EventDriver.getClient(instance.getCluster(), false, OutputHandler.INSTANCE).submit(patterns);
+        ManagixUtil.getEventrixClient(instance.getCluster()).submit(patterns);
     }
 
     @Override
@@ -44,6 +42,12 @@
         return new RestoreConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class RestoreConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
index 3aabb75..348d59e 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
@@ -18,16 +18,14 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixDriver;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.error.VerificationUtil;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
+import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.installer.service.ServiceProvider;
 
 public class StartCommand extends AbstractCommand {
@@ -39,9 +37,9 @@
         ManagixUtil.createAsterixZip(instance, false);
         PatternCreator pc = new PatternCreator();
         Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, instance.getCluster());
-        EventDriver.getClient(instance.getCluster(), false, OutputHandler.INSTANCE).submit(patterns);
-        ManagixUtil.deleteDirectory(ManagixDriver.getManagixHome() + File.separator + ManagixDriver.ASTERIX_DIR
-                + File.separator + asterixInstanceName);
+        ManagixUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+       // ManagixUtil.deleteDirectory(ManagixDriver.getManagixHome() + File.separator + ManagixDriver.ASTERIX_DIR
+         //       + File.separator + asterixInstanceName);
         AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(instance);
         VerificationUtil.updateInstanceWithRuntimeDescription(instance, runtimeState, true);
         System.out.println(instance.getDescription(false));
@@ -53,6 +51,12 @@
         return new StartConfig();
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class StartConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
index 441b63c..d82d751 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
@@ -20,13 +20,11 @@
 
 import org.kohsuke.args4j.Option;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.management.EventrixClient;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
 import edu.uci.ics.asterix.event.schema.pattern.Pattern;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
@@ -46,7 +44,7 @@
         for (Node node : asterixInstance.getCluster().getNode()) {
             patternsToExecute.add(pc.createNCStopPattern(node.getId(), asterixInstanceName + "_" + node.getId()));
         }
-        EventrixClient client = EventDriver.getClient(asterixInstance.getCluster(), false, OutputHandler.INSTANCE);
+        EventrixClient client = ManagixUtil.getEventrixClient(asterixInstance.getCluster());
         try {
             client.submit(new Patterns(patternsToExecute));
         } catch (Exception e) {
@@ -66,6 +64,12 @@
         return ((StopConfig) config).name;
     }
 
+    @Override
+    protected String getUsageDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
 }
 
 class StopConfig implements CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixDriver.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixDriver.java
index c79ac61..1474ca9 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixDriver.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixDriver.java
@@ -22,7 +22,6 @@
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.Unmarshaller;
 
-import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.event.Events;
 import edu.uci.ics.asterix.installer.command.CommandHandler;
 import edu.uci.ics.asterix.installer.schema.conf.Configuration;
@@ -33,20 +32,23 @@
 public class ManagixDriver {
 
     public static final String MANAGIX_INTERNAL_DIR = ".managix";
+    public static final String MANAGIX_EVENT_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix";
+    public static final String MANAGIX_EVENT_SCRIPTS_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix"
+            + File.separator + "scripts";
+
     public static final String ASTERIX_DIR = "asterix";
     public static final String EVENTS_DIR = "events";
 
     private static final Logger LOGGER = Logger.getLogger(ManagixDriver.class.getName());
     private static final String ENV_MANAGIX_HOME = "MANAGIX_HOME";
     private static final String MANAGIX_CONF_XML = "conf" + File.separator + "managix-conf.xml";
-    private static final String MANAGIX_EVENTS_XML = MANAGIX_INTERNAL_DIR + File.separator + EVENTS_DIR
-            + File.separator + "events.pkg" + File.separator + "events.xml";
 
     private static Configuration conf;
     private static String managixHome;
     private static String hyracksServerZip;
     private static String hyracksClientZip;
     private static String asterixZip;
+    private static Events events;
 
     public static String getHyrackServerZip() {
         return hyracksServerZip;
@@ -75,13 +77,6 @@
         Unmarshaller unmarshaller = configCtx.createUnmarshaller();
         conf = (Configuration) unmarshaller.unmarshal(configFile);
 
-        File managixEventFile = new File(managixHome + File.separator + MANAGIX_EVENTS_XML);
-        JAXBContext eventCtx = JAXBContext.newInstance(Events.class);
-        unmarshaller = eventCtx.createUnmarshaller();
-        Events events = (Events) unmarshaller.unmarshal(managixEventFile);
-        EventDriver.setHomeDir(managixHome + File.separator + MANAGIX_INTERNAL_DIR + File.separator + EVENTS_DIR);
-        EventDriver.setEvents(events);
-
         hyracksServerZip = initBinary("hyracks-server");
         hyracksClientZip = initBinary("hyracks-cli");
         ManagixUtil.unzip(hyracksClientZip, getHyracksClientHome());
@@ -123,6 +118,10 @@
         return managixHome + File.separator + ASTERIX_DIR;
     }
 
+    public static Events getEvents() {
+        return events;
+    }
+
     public static void main(String args[]) {
         try {
             if (args.length != 0) {
@@ -136,6 +135,7 @@
             LOGGER.log(Level.SEVERE, "Unknown command");
             printUsage();
         } catch (Exception e) {
+            e.printStackTrace();
             LOGGER.log(Level.SEVERE, e.getMessage());
         }
     }
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixUtil.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixUtil.java
index 85dcf1e..f849151 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixUtil.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/ManagixUtil.java
@@ -40,9 +40,11 @@
 import org.apache.commons.io.IOUtils;
 
 import edu.uci.ics.asterix.event.driver.EventDriver;
+import edu.uci.ics.asterix.event.management.EventrixClient;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
 import edu.uci.ics.asterix.installer.error.ManagixException;
+import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
 import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
 import edu.uci.ics.asterix.installer.service.ServiceProvider;
@@ -121,13 +123,12 @@
             if (nodeDataStore.length() == 0) {
                 throw new IllegalStateException(" Store not defined for node " + node.getId());
             }
-            conf.append(asterixInstanceName + "_" + node.getId() + ".stores" + "=" + nodeDataStore
-                    + "\n");
+            conf.append(asterixInstanceName + "_" + node.getId() + ".stores" + "=" + nodeDataStore + "\n");
 
         }
         Properties asterixConfProp = asterixInstance.getConfiguration();
-        String outpuDir = asterixConfProp.getProperty("output_dir");
-        conf.append("OutputDir=" + outpuDir);
+        String outputDir = asterixConfProp.getProperty("output_dir");
+        conf.append("OutputDir=" + outputDir);
         File asterixConfDir = new File(ManagixDriver.getAsterixDir() + File.separator + asterixInstanceName);
         asterixConfDir.mkdirs();
         dumpToFile(ManagixDriver.getAsterixDir() + File.separator + asterixInstanceName + File.separator
@@ -297,4 +298,9 @@
         return writer.toString();
     }
 
+    public static EventrixClient getEventrixClient(Cluster cluster) throws Exception {
+        return new EventrixClient(ManagixDriver.getManagixHome() + File.separator + ManagixDriver.MANAGIX_EVENT_DIR,
+                cluster, false, OutputHandler.INSTANCE);
+    }
+
 }
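(With getEventrixClient in place, every command touched by this patch converges on the same two-step flow: build a Patterns object with PatternCreator, then hand it to the shared client. Condensed from the BackupCommand change above; instance and localPath stand in for the command's validated state and CLI argument:

    PatternCreator pc = new PatternCreator();
    Patterns patterns = pc.getBackUpAsterixPattern(instance, localPath);
    ManagixUtil.getEventrixClient(instance.getCluster()).submit(patterns);
)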
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java
index f4634a5..3b157ec 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java
@@ -23,9 +23,9 @@
 import edu.uci.ics.asterix.installer.driver.ManagixDriver;
 import edu.uci.ics.asterix.installer.driver.ManagixUtil;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
+import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
 import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.installer.model.ProcessInfo;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
 
 public class VerificationUtil {
 
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
index 8fe00ab..74bdbc2 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
@@ -21,7 +21,6 @@
 
 import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.MasterNode;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
 import edu.uci.ics.asterix.event.schema.pattern.Delay;
 import edu.uci.ics.asterix.event.schema.pattern.Event;
@@ -69,12 +68,12 @@
             }
             Pattern createNC = createNCStartPattern(cluster.getMasterNode().getIp(), node.getId(), asterixInstanceName
                     + "_" + node.getId());
-            addInitialDelay(createNC, 3, "sec");
+            addInitialDelay(createNC, 4, "sec");
             ps.add(createNC);
         }
 
         Pattern asterixDeploy = createAsterixDeployPattern(asterixInstanceName, cluster);
-        addInitialDelay(asterixDeploy, 4, "sec");
+        addInitialDelay(asterixDeploy, 6, "sec");
         ps.add(asterixDeploy);
 
         Patterns patterns = new Patterns(ps);
@@ -151,17 +150,17 @@
         List<Pattern> patternList = new ArrayList<Pattern>();
         String workingDir = cluster.getWorkingDir().getDir();
         String hadoopVersion = ManagixDriver.getConfiguration().getBackup().getHdfs().getVersion();
-        File hadoopTar = new File(ManagixDriver.getManagixHome() + File.separator + ManagixDriver.MANAGIX_INTERNAL_DIR
-                + File.separator + "hadoop" + File.separator + "hadoop-" + hadoopVersion + ".tar");
-        if (!hadoopTar.exists()) {
+        File hadoopDir = new File(ManagixDriver.getManagixHome() + File.separator + ManagixDriver.MANAGIX_INTERNAL_DIR
+                + File.separator + "hadoop-" + hadoopVersion);
+        if (!hadoopDir.exists()) {
             throw new IllegalStateException("Hadoop version :" + hadoopVersion + " not supported");
         }
 
         Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
         String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
-        String pargs = username + " " + hadoopTar.getAbsolutePath() + " " + cluster.getMasterNode().getIp() + " "
-                + workingDir + " " + "unpack";
-        Event event = new Event("file_transfer", nodeid, pargs);
+        String pargs = username + " " + hadoopDir.getAbsolutePath() + " " + cluster.getMasterNode().getIp() + " "
+                + workingDir;
+        Event event = new Event("directory_transfer", nodeid, pargs);
         Pattern p = new Pattern(null, 1, null, event);
         addInitialDelay(p, 2, "sec");
         patternList.add(p);
@@ -170,9 +169,9 @@
         if (copyToNC) {
             for (Node node : cluster.getNode()) {
                 nodeid = new Nodeid(new Value(null, node.getId()));
-                pargs = cluster.getUsername() + " " + hadoopTar.getAbsolutePath() + " " + node.getIp() + " "
-                        + workingDir + " " + "unpack";
-                event = new Event("file_transfer", nodeid, pargs);
+                pargs = cluster.getUsername() + " " + hadoopDir.getAbsolutePath() + " " + node.getIp() + " "
+                        + workingDir;
+                event = new Event("directory_transfer", nodeid, pargs);
                 p = new Pattern(null, 1, null, event);
                 addInitialDelay(p, 2, "sec");
                 patternList.add(p);
@@ -212,19 +211,24 @@
     private Patterns createRemoveAsterixStoragePattern(AsterixInstance instance) throws Exception {
         List<Pattern> patternList = new ArrayList<Pattern>();
         Cluster cluster = instance.getCluster();
-        String clusterStore = cluster.getStore();
-        String nodeStore;
-        String pargs;
+        String pargs = null;
 
         for (Node node : cluster.getNode()) {
             Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
-            nodeStore = node.getStore() == null ? clusterStore + File.separator + node.getId() : node.getStore();
-            String[] nodeStores = nodeStore.split(",");
-            for (String ns : nodeStores) {
-                pargs = ns + File.separator + node.getId() + instance.getName();
-                Event event = new Event("file_delete", nodeid, pargs);
-                patternList.add(new Pattern(null, 1, null, event));
+            String[] nodeStores;
+            if (node.getStore() != null) {
+                nodeStores = node.getStore().trim().split(",");
+                for (String ns : nodeStores) {
+                    pargs = ns + File.separator + instance.getName();
+                }
+            } else {
+                nodeStores = cluster.getStore().trim().split(",");
+                for (String ns : nodeStores) {
+                    pargs = ns + File.separator + node.getId() + File.separator + instance.getName();
+                }
             }
+            Event event = new Event("file_delete", nodeid, pargs);
+            patternList.add(new Pattern(null, 1, null, event));
         }
         Patterns patterns = new Patterns(patternList);
         return patterns;
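(Note that the switch from file_transfer to directory_transfer has to stay in lock-step with the positional parameters of events/file/dir_transfer.sh. The convention, lifted from the new code above, is simply a space-separated pargs string:

    // Fields of pargs become $1..$4 of dir_transfer.sh:
    // USERNAME, DIR_TO_TRANSFER, DEST_HOST, DEST_DIR.
    String pargs = username + " " + hadoopDir.getAbsolutePath() + " "
            + cluster.getMasterNode().getIp() + " " + workingDir;
    Event event = new Event("directory_transfer", nodeid, pargs);
    Pattern p = new Pattern(null, 1, null, event);
)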
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java
index 8d1486d..4f5628f 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java
@@ -24,6 +24,8 @@
         ASTERIX_DEPLOY,
         BACKUP,
         RESTORE,
-        FILE_TRANSFER
+        FILE_DELETE,
+        FILE_TRANSFER,
+        DIRECTORY_TRANSFER
     }
 }
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
index ec6bd61..977147e 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
@@ -49,13 +49,12 @@
     private static final int ZOOKEEPER_SESSION_TIME_OUT = 40 * 1000; //milliseconds
     private static final String ZOOKEEPER_HOME = ManagixDriver.getManagixHome() + File.separator
             + ManagixDriver.MANAGIX_INTERNAL_DIR + File.separator + "zookeeper";
-    private static final String ZOO_KEEPER_CONFIG = ZOOKEEPER_HOME + File.separator + "zk.pkg" + File.separator
-            + "zk.cfg";
+    private static final String ZOO_KEEPER_CONFIG = ZOOKEEPER_HOME + File.separator + "zk.cfg";
 
     private boolean isRunning = false;
     private ZooKeeper zk;
     private String zkConnectionString;
-    private static final String ASTERIX_INSTANCE_BASE_PATH = "/asterix";
+    private static final String ASTERIX_INSTANCE_BASE_PATH = "/Asterix";
     private static final int DEFAULT_NODE_VERSION = -1;
     private LinkedBlockingQueue<String> msgQ = new LinkedBlockingQueue<String>();
     private ZooKeeperWatcher watcher = new ZooKeeperWatcher(msgQ);
@@ -94,7 +93,7 @@
         LOGGER.info("Starting ZooKeeper at " + zkConnectionString);
         ZookeeperUtil.writeConfiguration(ZOO_KEEPER_CONFIG, conf, ZOOKEEPER_LEADER_CONN_PORT,
                 ZOOKEEPER_LEADER_ELEC_PORT);
-        String initScript = ZOOKEEPER_HOME + File.separator + "zk.init";
+        String initScript = ZOOKEEPER_HOME + File.separator + "bin" + File.separator + "zk.init";
         StringBuffer cmdBuffer = new StringBuffer();
         cmdBuffer.append(initScript + " ");
         cmdBuffer.append(conf.getZookeeper().getHomeDir() + " ");
diff --git a/asterix-installer/src/main/resources/clusters/local.xml b/asterix-installer/src/main/resources/clusters/local.xml
new file mode 100644
index 0000000..06eb8ea
--- /dev/null
+++ b/asterix-installer/src/main/resources/clusters/local.xml
@@ -0,0 +1,25 @@
+<cluster xmlns="cluster">
+  <name>local</name>
+  <ram>1024m</ram>
+  <java_home>$JAVA_HOME</java_home>
+  <workingDir>
+     <dir>/Users/ramangrover29/asterix/working</dir>
+     <NFS>true</NFS>
+  </workingDir>
+  <logdir>/Users/ramangrover29/asterix/logs</logdir>
+  <store>/Users/ramangrover29/asterix/storage</store>
+  <master-node>
+     <id>master</id>
+     <ip>127.0.0.1</ip>
+     <cluster-ip>127.0.0.1</cluster-ip>
+     <ram>512m</ram>
+  </master-node>
+  <node>
+     <id>node1</id>
+     <ip>127.0.0.1</ip>
+  </node>
+  <node>
+     <id>node2</id>
+     <ip>127.0.0.1</ip>
+  </node>
+</cluster>
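(The descriptors under clusters/, such as local.xml above, are consumed as JAXB documents. A sketch of loading one, modeled on the managix-conf.xml unmarshalling in ManagixDriver; treating the Cluster schema class imported by CreateCommand as JAXB-annotated is an assumption of this sketch:

    import java.io.File;

    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.Unmarshaller;

    import edu.uci.ics.asterix.event.schema.cluster.Cluster;

    public class ClusterLoader {
        // Sketch only: mirrors how ManagixDriver unmarshals its own configuration file.
        public static Cluster load(String pathToClusterXml) throws Exception {
            JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
            Unmarshaller unmarshaller = ctx.createUnmarshaller();
            return (Cluster) unmarshaller.unmarshal(new File(pathToClusterXml));
        }
    }
)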
diff --git a/asterix-installer/src/main/resources/conf/asterix.conf b/asterix-installer/src/main/resources/conf/asterix.conf
new file mode 100644
index 0000000..659b48e
--- /dev/null
+++ b/asterix-installer/src/main/resources/conf/asterix.conf
@@ -0,0 +1 @@
+output_dir=/tmp/asterix_output/
diff --git a/asterix-installer/src/main/resources/conf/log4j.properties b/asterix-installer/src/main/resources/conf/log4j.properties
new file mode 100644
index 0000000..0e340f7
--- /dev/null
+++ b/asterix-installer/src/main/resources/conf/log4j.properties
@@ -0,0 +1,60 @@
+# Define some default values that can be overridden by system properties
+zookeeper.root.logger=ERROR, CONSOLE
+zookeeper.console.threshold=WARN
+zookeeper.log.dir=.
+zookeeper.log.file=zookeeper.log
+zookeeper.log.threshold=WARN
+zookeeper.tracelog.dir=.
+zookeeper.tracelog.file=zookeeper_trace.log
+
+log4j.logger.edu.uci.ics=INFO
+
+#
+# ZooKeeper Logging Configuration
+#
+
+# Format is "<default threshold> (, <appender>)+
+
+# DEFAULT: console appender only
+log4j.rootLogger=WARN
+
+# Example with rolling log file
+#log4j.rootLogger=DEBUG, CONSOLE, ROLLINGFILE
+
+# Example with rolling log file and tracing
+#log4j.rootLogger=TRACE, CONSOLE, ROLLINGFILE, TRACEFILE
+
+#
+# Log INFO level and above messages to the console
+#
+log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
+log4j.appender.CONSOLE.Threshold=${zookeeper.console.threshold}
+log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
+log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L] - %m%n
+
+#
+# Add ROLLINGFILE to rootLogger to get log file output
+#    Log DEBUG level and above messages to a log file
+log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.ROLLINGFILE.Threshold=${zookeeper.log.threshold}
+log4j.appender.ROLLINGFILE.File=${zookeeper.log.dir}/${zookeeper.log.file}
+
+# Max log file size of 10MB
+log4j.appender.ROLLINGFILE.MaxFileSize=10MB
+# uncomment the next line to limit number of backup files
+#log4j.appender.ROLLINGFILE.MaxBackupIndex=10
+
+log4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.ROLLINGFILE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L] - %m%n
+
+
+#
+# Add TRACEFILE to rootLogger to get log file output
+#    Log DEBUG level and above messages to a log file
+log4j.appender.TRACEFILE=org.apache.log4j.FileAppender
+log4j.appender.TRACEFILE.Threshold=TRACE
+log4j.appender.TRACEFILE.File=${zookeeper.tracelog.dir}/${zookeeper.tracelog.file}
+
+log4j.appender.TRACEFILE.layout=org.apache.log4j.PatternLayout
+### Notice we are including log4j's NDC here (%x)
+log4j.appender.TRACEFILE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L][%x] - %m%n
diff --git a/asterix-installer/src/main/resources/conf/managix-conf.xml b/asterix-installer/src/main/resources/conf/managix-conf.xml
new file mode 100644
index 0000000..9dac8ac
--- /dev/null
+++ b/asterix-installer/src/main/resources/conf/managix-conf.xml
@@ -0,0 +1,16 @@
+<configuration xmlns="managix">
+  <backup>
+    <hdfs>
+      <version>0.20.2</version>
+      <url>hdfs://mjcarey-desktop.ics.uci.edu:54310</url>
+      <backupDir>/user/raman/asterix_desktop</backupDir>
+    </hdfs>
+  </backup>
+  <zookeeper>
+    <homeDir>/Users/ramangrover29/zookeeper</homeDir>
+    <clientPort>2900</clientPort>
+    <servers>
+      <server>localhost</server>
+    </servers>
+  </zookeeper>
+</configuration>
diff --git a/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop b/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop
new file mode 100755
index 0000000..683e95d
--- /dev/null
+++ b/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop
@@ -0,0 +1,290 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The Hadoop command script
+#
+# Environment Variables
+#
+#   JAVA_HOME        The java implementation to use.  Overrides JAVA_HOME.
+#
+#   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
+#
+#   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB. 
+#                    Default is 1000.
+#
+#   HADOOP_OPTS      Extra Java runtime options.
+#   
+#   HADOOP_NAMENODE_OPTS       These options are added to HADOOP_OPTS 
+#   HADOOP_CLIENT_OPTS         when the respective command is run.
+#   HADOOP_{COMMAND}_OPTS etc  HADOOP_JT_OPTS applies to JobTracker 
+#                              for e.g.  HADOOP_CLIENT_OPTS applies to 
+#                              more than one command (fs, dfs, fsck, 
+#                              dfsadmin etc)  
+#
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#
+#   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
+#
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hadoop-config.sh
+
+cygwin=false
+case "`uname`" in
+CYGWIN*) cygwin=true;;
+esac
+
+# if no args specified, show usage
+if [ $# = 0 ]; then
+  echo "Usage: hadoop [--config confdir] COMMAND"
+  echo "where COMMAND is one of:"
+  echo "  namenode -format     format the DFS filesystem"
+  echo "  secondarynamenode    run the DFS secondary namenode"
+  echo "  namenode             run the DFS namenode"
+  echo "  datanode             run a DFS datanode"
+  echo "  dfsadmin             run a DFS admin client"
+  echo "  mradmin              run a Map-Reduce admin client"
+  echo "  fsck                 run a DFS filesystem checking utility"
+  echo "  fs                   run a generic filesystem user client"
+  echo "  balancer             run a cluster balancing utility"
+  echo "  jobtracker           run the MapReduce job Tracker node" 
+  echo "  pipes                run a Pipes job"
+  echo "  tasktracker          run a MapReduce task Tracker node" 
+  echo "  job                  manipulate MapReduce jobs"
+  echo "  queue                get information regarding JobQueues" 
+  echo "  version              print the version"
+  echo "  jar <jar>            run a jar file"
+  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
+  echo "  archive -archiveName NAME <src>* <dest> create a hadoop archive"
+  echo "  daemonlog            get/set the log level for each daemon"
+  echo " or"
+  echo "  CLASSNAME            run the class named CLASSNAME"
+  echo "Most commands print help when invoked w/o parameters."
+  exit 1
+fi
+
+# get arguments
+COMMAND=$1
+shift
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
+fi
+
+# some Java parameters
+if [ "$JAVA_HOME" != "" ]; then
+  #echo "run java in $JAVA_HOME"
+  JAVA_HOME=$JAVA_HOME
+fi
+  
+if [ "$JAVA_HOME" = "" ]; then
+  echo "Error: JAVA_HOME is not set."
+  exit 1
+fi
+
+JAVA=$JAVA_HOME/bin/java
+JAVA_HEAP_MAX=-Xmx1000m 
+
+# check envvars which might override default args
+if [ "$HADOOP_HEAPSIZE" != "" ]; then
+  #echo "run with heapsize $HADOOP_HEAPSIZE"
+  JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
+  #echo $JAVA_HEAP_MAX
+fi
+
+# CLASSPATH initially contains $HADOOP_CONF_DIR
+CLASSPATH=${HADOOP_CONF_DIR}
+CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
+
+# for developers, add Hadoop classes to CLASSPATH
+if [ -d "$HADOOP_HOME/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
+fi
+if [ -d "$HADOOP_HOME/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
+fi
+if [ -d "$HADOOP_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_HOME/build/tools" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
+fi
+
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=
+
+# for releases, add core hadoop jar & webapps to CLASSPATH
+if [ -d "$HADOOP_HOME/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
+fi
+for f in $HADOOP_HOME/hadoop-*-core.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
+for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+fi
+
+for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+for f in $HADOOP_HOME/hadoop-*-tools.jar; do
+  TOOL_PATH=${TOOL_PATH}:$f;
+done
+for f in $HADOOP_HOME/build/hadoop-*-tools.jar; do
+  TOOL_PATH=${TOOL_PATH}:$f;
+done
+
+# add user-specified CLASSPATH last
+if [ "$HADOOP_CLASSPATH" != "" ]; then
+  CLASSPATH=${HADOOP_CLASSPATH}:${CLASSPATH}
+fi
+
+# default log directory & file
+if [ "$HADOOP_LOG_DIR" = "" ]; then
+  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+fi
+if [ "$HADOOP_LOGFILE" = "" ]; then
+  HADOOP_LOGFILE='hadoop.log'
+fi
+
+# default policy file for service-level authorization
+if [ "$HADOOP_POLICYFILE" = "" ]; then
+  HADOOP_POLICYFILE="hadoop-policy.xml"
+fi
+
+# restore ordinary behaviour
+unset IFS
+
+# figure out which class to run
+if [ "$COMMAND" = "namenode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
+elif [ "$COMMAND" = "secondarynamenode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
+elif [ "$COMMAND" = "datanode" ] ; then
+  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
+elif [ "$COMMAND" = "fs" ] ; then
+  CLASS=org.apache.hadoop.fs.FsShell
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "dfs" ] ; then
+  CLASS=org.apache.hadoop.fs.FsShell
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "dfsadmin" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "mradmin" ] ; then
+  CLASS=org.apache.hadoop.mapred.tools.MRAdmin
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "fsck" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.DFSck
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "balancer" ] ; then
+  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
+elif [ "$COMMAND" = "jobtracker" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobTracker
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
+elif [ "$COMMAND" = "tasktracker" ] ; then
+  CLASS=org.apache.hadoop.mapred.TaskTracker
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
+elif [ "$COMMAND" = "job" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobClient
+elif [ "$COMMAND" = "queue" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobQueueClient
+elif [ "$COMMAND" = "pipes" ] ; then
+  CLASS=org.apache.hadoop.mapred.pipes.Submitter
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "version" ] ; then
+  CLASS=org.apache.hadoop.util.VersionInfo
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "jar" ] ; then
+  CLASS=org.apache.hadoop.util.RunJar
+elif [ "$COMMAND" = "distcp" ] ; then
+  CLASS=org.apache.hadoop.tools.DistCp
+  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "daemonlog" ] ; then
+  CLASS=org.apache.hadoop.log.LogLevel
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "archive" ] ; then
+  CLASS=org.apache.hadoop.tools.HadoopArchives
+  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "sampler" ] ; then
+  CLASS=org.apache.hadoop.mapred.lib.InputSampler
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+else
+  CLASS=$COMMAND
+fi
+
+# cygwin path translation
+if $cygwin; then
+  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+  HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
+  HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
+  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
+fi
+# setup 'java.library.path' for native-hadoop code if necessary
+JAVA_LIBRARY_PATH=''
+if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
+  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
+  
+  if [ -d "$HADOOP_HOME/build/native" ]; then
+    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
+  fi
+  
+  if [ -d "${HADOOP_HOME}/lib/native" ]; then
+    if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+    else
+      JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+    fi
+  fi
+fi
+
+# cygwin path translation
+if $cygwin; then
+  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
+fi
+
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
+if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+  HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+fi  
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
+
+# run it
+#echo "exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@""
+exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
diff --git a/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-config.sh b/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-config.sh
new file mode 100755
index 0000000..1f9d52d
--- /dev/null
+++ b/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-config.sh
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# included in all the hadoop scripts with source command
+# should not be executable directly
+# also should not be passed any arguments, since we need original $*
+
+# resolve links - $0 may be a softlink
+
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# convert relative path to absolute path
+bin=`dirname "$this"`
+script=`basename "$this"`
+bin=`cd "$bin"; pwd`
+this="$bin/$script"
+
+# the root of the Hadoop installation
+export HADOOP_HOME=`dirname "$this"`/..
+
+# check to see if the conf dir is given as an optional argument
+if [ $# -gt 1 ]
+then
+    if [ "--config" = "$1" ]
+    then
+        shift
+        confdir=$1
+        shift
+        HADOOP_CONF_DIR=$confdir
+    fi
+fi
+ 
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+
+# check to see whether the slaves file or the
+# masters file is specified
+if [ $# -gt 1 ]
+then
+    if [ "--hosts" = "$1" ]
+    then
+        shift
+        slavesfile=$1
+        shift
+        export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
+    fi
+fi
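
For reference, the optional-argument handling above is what lets scripts that source hadoop-config.sh accept an alternate configuration directory or hosts file. A sketch against a stock Hadoop 0.20.2 layout; the directory and file names are hypothetical:

  # run against an alternate configuration directory
  $HADOOP_HOME/bin/hadoop --config /opt/asterix/hadoop-conf fs -ls /
  # point slave-related scripts at a different hosts file inside that conf dir
  $HADOOP_HOME/bin/start-dfs.sh --hosts datanodes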
diff --git a/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-env.sh b/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-env.sh
new file mode 100644
index 0000000..5463a35
--- /dev/null
+++ b/asterix-installer/src/main/resources/hadoop-0.20.2/conf/hadoop-env.sh
@@ -0,0 +1,54 @@
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.  Required.
+export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home
+
+# Extra Java CLASSPATH elements.  Optional.
+# export HADOOP_CLASSPATH=
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HADOOP_HEAPSIZE=2000
+
+# Extra Java runtime options.  Empty by default.
+# export HADOOP_OPTS=-server
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
+export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
+export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
+# export HADOOP_TASKTRACKER_OPTS=
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+# export HADOOP_CLIENT_OPTS
+
+# Extra ssh options.  Empty by default.
+# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+# export HADOOP_PID_DIR=/var/hadoop/pids
+
+# A string representing this instance of hadoop. $USER by default.
+# export HADOOP_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes.  See 'man nice'.
+# export HADOOP_NICENESS=10
diff --git a/asterix-installer/src/main/resources/scripts/managix b/asterix-installer/src/main/resources/scripts/managix
new file mode 100755
index 0000000..d788873
--- /dev/null
+++ b/asterix-installer/src/main/resources/scripts/managix
@@ -0,0 +1,20 @@
+if [ -z "$MANAGIX_HOME" ]
+then
+  echo "MANAGIX_HOME is not defined"
+  exit 1
+fi
+
+VERSION=0.0.4-SNAPSHOT
+
+for jar in "$MANAGIX_HOME"/lib/*.jar
+do
+  if [ -z "$MANAGIX_CLASSPATH" ]
+  then
+    MANAGIX_CLASSPATH=$jar
+  else
+    MANAGIX_CLASSPATH=$MANAGIX_CLASSPATH:$jar
+  fi
+done
+
+MANAGIX_CLASSPATH=$MANAGIX_CLASSPATH:$MANAGIX_HOME/conf/log4j.properties
+java $JAVA_OPTS -Dlog4j.configuration=file:$MANAGIX_HOME/conf/log4j.properties -cp "$MANAGIX_CLASSPATH" edu.uci.ics.asterix.installer.driver.ManagixDriver "$@"
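
A sketch of how the launcher above is expected to be used once the installer is unpacked; the install location and the placeholder command are assumptions, not something this patch defines:

  export MANAGIX_HOME=/home/joe/asterix-mgmt
  export PATH=$PATH:$MANAGIX_HOME/bin
  managix <command> [options]    # dispatches to edu.uci.ics.asterix.installer.driver.ManagixDriver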
diff --git a/asterix-installer/src/main/resources/scripts/verify.sh b/asterix-installer/src/main/resources/scripts/verify.sh
new file mode 100755
index 0000000..fe1bb0c
--- /dev/null
+++ b/asterix-installer/src/main/resources/scripts/verify.sh
@@ -0,0 +1,21 @@
+INSTANCE_NAME=$1
+MASTER_NODE=$2
+shift 2
+numargs=$#
+for ((i=1 ; i <= numargs ; i=i+2))
+do
+ host=$1
+ nc_id=$2
+ INFO=$(ssh $host "ps -ef | grep nc_join | grep -v grep | grep -v ssh | grep $nc_id" | head -n 1)
+ PARENT_ID=`echo $INFO | cut -d " " -f2`
+ PID_INFO=$(ssh $host "ps -ef | grep hyracks | grep -v grep | grep -v nc_join | grep $PARENT_ID")
+ PID=`echo $PID_INFO | cut -d " " -f2`
+ echo "NC:$host:$nc_id:$PID"
+ shift 2
+done
+
+CC_PARENT_ID_INFO=$(ssh $MASTER_NODE "ps -ef | grep hyracks | grep cc_start | grep -v ssh")
+CC_PARENT_ID=`echo $CC_PARENT_ID_INFO | tr -s " " | cut -d " " -f2`
+CC_ID_INFO=$(ssh $MASTER_NODE "ps -ef | grep hyracks | grep $CC_PARENT_ID | grep -v bash")
+CC_ID=`echo $CC_ID_INFO | tr -s " " | cut -d " " -f2`
+echo "CC:$MASTER_NODE:N/A:$CC_ID"
diff --git a/asterix-installer/src/main/resources/zookeeper/start_zk.sh b/asterix-installer/src/main/resources/zookeeper/start_zk.sh
new file mode 100755
index 0000000..c8154bf
--- /dev/null
+++ b/asterix-installer/src/main/resources/zookeeper/start_zk.sh
@@ -0,0 +1,8 @@
+ZK_HOME=$1
+ZK_ID=$2
+mkdir -p $ZK_HOME/data
+echo $ZK_ID > $ZK_HOME/data/myid
+CLASSPATH=$ZK_HOME/lib/zookeeper-3.4.4.jar:$ZK_HOME/lib/log4j-1.2.15.jar:$ZK_HOME/lib/slf4j-api-1.6.1.jar:$ZK_HOME/conf:$ZK_HOME/conf/log4j.properties
+ZK_CONF=$ZK_HOME/zk.cfg
+export JAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=8400,server=y,suspend=n"
+java $JAVA_OPTS -Dlog4j.configuration="file:$ZK_HOME/conf/log4j.properties" -cp $CLASSPATH org.apache.zookeeper.server.quorum.QuorumPeerMain $ZK_CONF
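
start_zk.sh assumes a zk.cfg sitting in $ZK_HOME; that file is not part of this checkpoint, but a standard ZooKeeper quorum configuration matching the layout above would look roughly like the following (values are illustrative; dataDir must be the data directory where myid is written):

  tickTime=2000
  initLimit=10
  syncLimit=5
  dataDir=/home/joe/zookeeper/data
  clientPort=2181
  server.1=zk-host-1:2888:3888
  server.2=zk-host-2:2888:3888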
diff --git a/asterix-installer/src/main/resources/zookeeper/zk.init b/asterix-installer/src/main/resources/zookeeper/zk.init
new file mode 100755
index 0000000..9b554b1
--- /dev/null
+++ b/asterix-installer/src/main/resources/zookeeper/zk.init
@@ -0,0 +1,13 @@
+ZK_HOME=$1
+shift 1
+cd $MANAGIX_HOME/.managix/zookeeper
+tar cf zk.pkg.tar *
+zk_server_id=1
+for zk_host in "$@"
+do
+  ssh $zk_host "mkdir -p $ZK_HOME"
+  scp ./zk.pkg.tar $zk_host:$ZK_HOME/
+  ssh $zk_host "cd $ZK_HOME && tar xf $ZK_HOME/zk.pkg.tar && chmod +x $ZK_HOME/bin/start_zk.sh"
+  ssh $zk_host "$ZK_HOME/bin/start_zk.sh $ZK_HOME $zk_server_id" &
+  zk_server_id=`expr $zk_server_id + 1`
+done
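
zk.init expects the remote ZooKeeper install directory followed by the quorum hosts, and it relies on MANAGIX_HOME pointing at the unpacked installer so that $MANAGIX_HOME/.managix/zookeeper can be packaged; a hypothetical invocation:

  export MANAGIX_HOME=/home/joe/asterix-mgmt
  ./zk.init /home/joe/zookeeper zk-host-1 zk-host-2 zk-host-3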