Removed getRegistry() from the client API and the CC
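getRegistry() was never functional on the server side: the ClusterControllerService override built a map of INodeControllers but ended in `throw new UnsupportedOperationException()`, and its only would-be caller was already commented out in HadoopReadOperatorDescriptor. This change drops the method from IHyracksClientConnection, IHyracksClientInterface, and AbstractHyracksConnection, removes the dead override from ClusterControllerService, and deletes the commented-out constructor and partition-constraint code that referenced it.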
git-svn-id: https://hyracks.googlecode.com/svn/trunk/hyracks@67 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/AbstractHyracksConnection.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/AbstractHyracksConnection.java
index 5c17085..63cb572 100644
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/AbstractHyracksConnection.java
+++ b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/AbstractHyracksConnection.java
@@ -18,9 +18,7 @@
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
-import java.net.InetAddress;
import java.util.EnumSet;
-import java.util.Map;
import java.util.UUID;

import org.apache.http.HttpResponse;
@@ -100,9 +98,4 @@
public JobStatistics waitForCompletion(UUID jobId) throws Exception {
return hci.waitForCompletion(jobId);
}
-
- @Override
- public Map<String, InetAddress[]> getRegistry() throws Exception {
- return hci.getRegistry();
- }
}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
index 11978e3..b3c1148 100644
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
+++ b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
@@ -15,9 +15,7 @@
package edu.uci.ics.hyracks.api.client;

import java.io.File;
-import java.net.InetAddress;
import java.util.EnumSet;
-import java.util.Map;
import java.util.UUID;

import edu.uci.ics.hyracks.api.job.JobFlag;
@@ -39,6 +37,4 @@
public void start(UUID jobId) throws Exception;

public JobStatistics waitForCompletion(UUID jobId) throws Exception;
-
- public Map<String, InetAddress[]> getRegistry() throws Exception;
}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
index fcd114a..0c0089f 100644
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
+++ b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
@@ -14,10 +14,8 @@
*/
package edu.uci.ics.hyracks.api.client;

-import java.net.InetAddress;
import java.rmi.Remote;
import java.util.EnumSet;
-import java.util.Map;
import java.util.UUID;

import edu.uci.ics.hyracks.api.job.JobFlag;
@@ -40,6 +38,4 @@
public void start(UUID jobId) throws Exception;

public JobStatistics waitForCompletion(UUID jobId) throws Exception;
-
- public Map<String, InetAddress[]> getRegistry() throws Exception;
}
\ No newline at end of file
diff --git a/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java b/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
index 1d8aca2..e8586be 100644
--- a/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
@@ -21,7 +21,6 @@
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
-import java.net.InetAddress;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.util.EnumSet;
@@ -317,16 +316,6 @@
}

@Override
- public Map<String, InetAddress[]> getRegistry() throws Exception {
- Map<String, INodeController> map = new HashMap<String, INodeController>();
- for (Map.Entry<String, NodeControllerState> e : nodeRegistry.entrySet()) {
- map.put(e.getKey(), e.getValue().getNodeController());
- }
- // return map;
- throw new UnsupportedOperationException();
- }
-
- @Override
public synchronized void nodeHeartbeat(String id) throws Exception {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Heartbeat from: " + id);
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java b/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
index 288fdbd..e7b2714 100644
--- a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
+++ b/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
@@ -15,13 +15,8 @@
package edu.uci.ics.hyracks.dataflow.hadoop;

import java.io.IOException;
-import java.net.InetAddress;
import java.net.InetSocketAddress;
-import java.net.NetworkInterface;
-import java.util.ArrayList;
-import java.util.Enumeration;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.FileSystem;
@@ -34,14 +29,9 @@
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.util.ReflectionUtils;

-import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
-import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
-import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.hadoop.util.DatatypeHelper;
-import edu.uci.ics.hyracks.dataflow.hadoop.util.HadoopAdapter;
import edu.uci.ics.hyracks.dataflow.hadoop.util.HadoopFileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IRecordReader;

@@ -147,90 +137,10 @@
this.jobConfMap = jobConfMap;
}

- // public HadoopReadOperatorDescriptor(IClusterController clusterController, Map<String, String> jobConfMap,
- // JobSpecification spec, String fileSystemURL, String inputFormatClassName, RecordDescriptor recordDescriptor) {
- // super(spec, null, recordDescriptor);
- // HadoopAdapter hadoopAdapter = HadoopAdapter.getInstance(fileSystemURL);
- // String inputPathString = jobConfMap.get("mapred.input.dir");
- // String[] inputPaths = inputPathString.split(",");
- // Map<String, List<HadoopFileSplit>> blocksToRead = hadoopAdapter.getInputSplits(inputPaths);
- // List<HadoopFileSplit> hadoopFileSplits = new ArrayList<HadoopFileSplit>();
- // for (String filePath : blocksToRead.keySet()) {
- // hadoopFileSplits.addAll(blocksToRead.get(filePath));
- // }
- // for (HadoopFileSplit hadoopFileSplit : hadoopFileSplits) {
- // System.out.println(" Hadoop File Split : " + hadoopFileSplit);
- // }
- // super.splits = hadoopFileSplits.toArray(new HadoopFileSplit[] {});
- // configurePartitionConstraints(clusterController, blocksToRead);
- // this.inputFormatClassName = inputFormatClassName;
- // this.jobConfMap = jobConfMap;
- // }
-
- // private void configurePartitionConstraints(IClusterController clusterController,
- // Map<String, List<HadoopFileSplit>> blocksToRead) {
- // List<LocationConstraint> locationConstraints = new ArrayList<LocationConstraint>();
- // Map<String, INodeController> registry = null;
- // try {
- // // registry = clusterController.getRegistry();
- // // TODO
- // } catch (Exception e) {
- // e.printStackTrace();
- // }
- // Map<String, String> hostnameToNodeIdMap = new HashMap<String, String>();
- // NCConfig ncConfig = null;
- // for (String nodeId : registry.keySet()) {
- // try {
- // ncConfig = ((INodeController) registry.get(nodeId)).getConfiguration();
- // String ipAddress = ncConfig.dataIPAddress;
- // String hostname = InetAddress.getByName(ipAddress).getHostName();
- // System.out.println(" hostname :" + hostname + " nodeid:" + nodeId);
- // hostnameToNodeIdMap.put(hostname, nodeId);
- // } catch (Exception e) {
- // e.printStackTrace();
- // }
- // }
- //
- // for (String filePath : blocksToRead.keySet()) {
- // List<HadoopFileSplit> hadoopFileSplits = blocksToRead.get(filePath);
- // for (HadoopFileSplit hadoopFileSplit : hadoopFileSplits) {
- // String hostname = hadoopFileSplit.getHosts()[0];
- // System.out.println("host name is :" + hostname);
- // InetAddress address = null;
- // try {
- // address = InetAddress.getByName(hostname);
- // if (address.isLoopbackAddress()) {
- // Enumeration<NetworkInterface> netInterfaces = NetworkInterface.getNetworkInterfaces();
- // while (netInterfaces.hasMoreElements()) {
- // NetworkInterface ni = netInterfaces.nextElement();
- // InetAddress inetAddress = (InetAddress) ni.getInetAddresses().nextElement();
- // if (!inetAddress.isLoopbackAddress()) {
- // address = inetAddress;
- // break;
- // }
- // }
- // }
- // hostname = address.getHostName();
- // System.out.println("cannonical host name hyracks :" + hostname);
- // } catch (Exception e) {
- // e.printStackTrace();
- // }
- // String nodeId = hostnameToNodeIdMap.get(hostname);
- // System.out.println(" corresponding node id is :" + nodeId);
- // LocationConstraint locationConstraint = new AbsoluteLocationConstraint(nodeId);
- // locationConstraints.add(locationConstraint);
- // }
- // }
- //
- // PartitionConstraint partitionConstraint = new ExplicitPartitionConstraint(locationConstraints
- // .toArray(new LocationConstraint[] {}));
- // this.setPartitionConstraint(partitionConstraint);
- // }
-
@Override
protected IRecordReader createRecordReader(HadoopFileSplit fileSplit, RecordDescriptor desc) throws Exception {
Reporter reporter = createReporter();
IRecordReader recordReader = new HDFSCustomReader(jobConfMap, fileSplit, inputFormatClassName, reporter);
return recordReader;
}
-}
+}
\ No newline at end of file