hdt-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rsha...@apache.org
Subject [2/6] git commit: - Adding hadoop2 based on 2.2 version - Changing fragments to plugins(Fragment classpath is appended to host classpath which causes issues thus making it as plugin) - Loading classes in different context loaders(http://wiki.eclipse.or
Date Fri, 20 Jun 2014 08:37:41 GMT
 - Adding hadoop2 based on 2.2 version
 - Changing fragments to plugins(Fragment classpath is appended to host classpath which causes issues thus making it as plugin)
 - Loading classes in different context loaders(http://wiki.eclipse.org/FAQ_How_do_I_use_the_context_class_loader_in_Eclipse%3F)
 - Adding version to HDFSServer, which enables determining the version of the client.
 - Updating the hadoop locationWizard to show resourcemanager address and select hadoop version


Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/34799cec
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/34799cec
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/34799cec

Branch: refs/heads/hadoop-eclipse-merge-development
Commit: 34799cece189198189fb8e8c64a1d8b2ff397f52
Parents: a7a89f4
Author: Rahul Sharma <rsharma@apache.org>
Authored: Fri May 23 16:35:30 2014 +0530
Committer: Rahul Sharma <rsharma@apache.org>
Committed: Tue Jun 10 10:17:30 2014 +0530

----------------------------------------------------------------------
 org.apache.hdt.core/META-INF/MANIFEST.MF        |   9 +-
 org.apache.hdt.core/models/Hadoop.ecore         |   2 +
 .../hdt/core/internal/hdfs/HDFSFileStore.java   |   2 +-
 .../hdt/core/internal/hdfs/HDFSManager.java     |  17 +-
 .../internal/hdfs/InterruptableHDFSClient.java  |   9 +-
 .../hdt/core/internal/model/HDFSServer.java     |  28 +
 .../hdt/core/internal/model/HadoopPackage.java  |  30 +-
 .../internal/model/impl/HDFSServerImpl.java     |  54 ++
 .../internal/model/impl/HadoopFactoryImpl.java  |   2 +-
 .../internal/model/impl/HadoopPackageImpl.java  |  11 +
 .../core/internal/model/util/HadoopSwitch.java  |  36 +-
 .../hdt/core/launch/AbstractHadoopCluster.java  |  43 +-
 .../org/apache/hdt/core/launch/ConfProp.java    |   8 +-
 org.apache.hdt.feature/.classpath               |   3 +-
 org.apache.hdt.feature/.project                 |   1 +
 org.apache.hdt.feature/feature.xml              |  10 +-
 .../META-INF/MANIFEST.MF                        |   7 +-
 org.apache.hdt.hadoop.release/build.properties  |   2 +-
 org.apache.hdt.hadoop.release/fragment.xml      |  43 --
 org.apache.hdt.hadoop.release/plugin.xml        |  43 ++
 .../hdt/hadoop/release/HadoopCluster.java       |  65 +-
 .../apache/hdt/hadoop/release/HadoopJob.java    |   2 +
 org.apache.hdt.hadoop2.release/.classpath       |  91 +++
 .../.settings/org.eclipse.core.resources.prefs  |   2 +
 .../.settings/org.eclipse.jdt.core.prefs        |   7 +
 .../.settings/org.eclipse.m2e.core.prefs        |   4 +
 .../META-INF/MANIFEST.MF                        |  98 +++
 org.apache.hdt.hadoop2.release/build.properties |  23 +
 org.apache.hdt.hadoop2.release/plugin.xml       |  35 ++
 org.apache.hdt.hadoop2.release/pom.xml          | 127 ++++
 .../hdt/hadoop2/release/HDFSClientRelease.java  | 235 +++++++
 .../hdt/hadoop2/release/HadoopCluster.java      | 619 +++++++++++++++++++
 .../apache/hdt/hadoop2/release/HadoopJob.java   | 338 ++++++++++
 .../hdfs/HDFSLightweightLabelDecorator.java     |   2 +-
 .../internal/hdfs/NewHDFSServerWizardPage.java  |  36 +-
 .../hdt/ui/internal/hdfs/NewHDFSWizard.java     |   3 +-
 .../internal/launch/HadoopLocationWizard.java   | 214 ++++---
 org.apache.hdt.updateSite/.classpath            |  15 +-
 org.apache.hdt.updateSite/.project              |   1 +
 pom.xml                                         |   1 +
 40 files changed, 2066 insertions(+), 212 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/META-INF/MANIFEST.MF b/org.apache.hdt.core/META-INF/MANIFEST.MF
index 1d6b8c4..e50301c 100644
--- a/org.apache.hdt.core/META-INF/MANIFEST.MF
+++ b/org.apache.hdt.core/META-INF/MANIFEST.MF
@@ -16,7 +16,7 @@ Bundle-RequiredExecutionEnvironment: JavaSE-1.6
 Bundle-Vendor: Apache Hadoop
 Bundle-ClassPath: .,
  jars/log4j-1.2.15.jar
-Export-Package:  org.apache.hdt.core,
+Export-Package: org.apache.hdt.core,
  org.apache.hdt.core.hdfs,
  org.apache.hdt.core.internal,
  org.apache.hdt.core.internal.hdfs;x-friends:="org.apache.hdt.ui",
@@ -45,9 +45,6 @@ Export-Package:  org.apache.hdt.core,
  org.apache.log4j.or.sax,
  org.apache.log4j.spi,
  org.apache.log4j.varia,
- org.apache.log4j.xml,
- org.apache.hadoop,
- org.apache.hadoop.conf,
- org.apache.hadoop.io,
- org.apache.hadoop.mapred
+ org.apache.log4j.xml
 Bundle-ActivationPolicy: lazy
+

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/models/Hadoop.ecore
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/models/Hadoop.ecore b/org.apache.hdt.core/models/Hadoop.ecore
index 2b3e8ea..70207c0 100644
--- a/org.apache.hdt.core/models/Hadoop.ecore
+++ b/org.apache.hdt.core/models/Hadoop.ecore
@@ -30,6 +30,8 @@
     <eStructuralFeatures xsi:type="ecore:EAttribute" name="userId" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>
     <eStructuralFeatures xsi:type="ecore:EAttribute" name="groupIds" upperBound="-1"
         eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"/>
+    <eStructuralFeatures xsi:type="ecore:EAttribute" name="version" eType="ecore:EDataType http://www.eclipse.org/emf/2002/Ecore#//EString"
+         defaultValueLiteral="1.0.0.0"/>
   </eClassifiers>
   <eClassifiers xsi:type="ecore:EClass" name="Servers">
     <eStructuralFeatures xsi:type="ecore:EReference" name="hdfsServers" upperBound="-1"

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSFileStore.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSFileStore.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSFileStore.java
index ffd68ec..2809e55 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSFileStore.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSFileStore.java
@@ -115,7 +115,7 @@ public class HDFSFileStore extends FileStore {
 	 * @throws CoreException
 	 */
 	private HDFSClient getClient() throws CoreException {
-		return HDFSManager.INSTANCE.getClient(getServer().getUri());
+		return HDFSManager.INSTANCE.getClient(getServer().getUri(),getServer().getVersion());
 	}
 
 	/**

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
index 5897cea..43ebf1f 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/HDFSManager.java
@@ -150,7 +150,7 @@ public class HDFSManager {
 	 * @return
 	 * @throws CoreException
 	 */
-	public HDFSServer createServer(String name, java.net.URI hdfsURI, String userId, List<String> groupIds) throws CoreException {
+	public HDFSServer createServer(String name, java.net.URI hdfsURI, String userId, List<String> groupIds,String version) throws CoreException {
 		if (hdfsURI.getPath() == null || hdfsURI.getPath().length() < 1) {
 			try {
 				hdfsURI = new java.net.URI(hdfsURI.toString() + "/");
@@ -163,6 +163,7 @@ public class HDFSManager {
 		hdfsServer.setName(name);
 		hdfsServer.setUri(hdfsURI.toString());
 		hdfsServer.setLoaded(true);
+		hdfsServer.setVersion(version);
 		if (userId != null)
 			hdfsServer.setUserId(userId);
 		if (groupIds != null)
@@ -289,7 +290,7 @@ public class HDFSManager {
 	 * @return
 	 * @throws CoreException
 	 */
-	public HDFSClient getClient(String serverURI) throws CoreException {
+	public HDFSClient getClient(String serverURI,String hdfsVersion) throws CoreException {
 		if (logger.isDebugEnabled())
 			logger.debug("getClient(" + serverURI + "): Server=" + serverURI);
 		HDFSServer server = getServer(serverURI);
@@ -306,8 +307,11 @@ public class HDFSManager {
 				IConfigurationElement[] elementsFor = Platform.getExtensionRegistry().getConfigurationElementsFor("org.apache.hdt.core.hdfsClient");
 				for (IConfigurationElement element : elementsFor) {
 					if (sUri.getScheme().equals(element.getAttribute("protocol"))) {
-						HDFSClient client = (HDFSClient) element.createExecutableExtension("class");
-						hdfsClientsMap.put(serverURI, new InterruptableHDFSClient(serverURI, client));
+						String version = element.getAttribute("protocolVersion");
+						if(hdfsVersion.equalsIgnoreCase(version)){
+							HDFSClient client = (HDFSClient) element.createExecutableExtension("class");
+							hdfsClientsMap.put(serverURI, new InterruptableHDFSClient(serverURI, client));						
+						}
 					}
 				}
 			} catch (URISyntaxException e) {
@@ -317,9 +321,10 @@ public class HDFSManager {
 		}
 	}
 	
-	public static org.eclipse.core.runtime.IStatus addServer(String serverName, String location, String userId, List<String> groupId) {
+	public static org.eclipse.core.runtime.IStatus addServer(String serverName, String location,
+			String userId, List<String> groupId,String version) {
 		try {
-			HDFSManager.INSTANCE.createServer(serverName, new URI(location), userId, groupId);
+			HDFSManager.INSTANCE.createServer(serverName, new URI(location), userId, groupId,version);
 		} catch (CoreException e) {
 			logger.warn(e.getMessage(), e);
 			return e.getStatus();

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/InterruptableHDFSClient.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/InterruptableHDFSClient.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/InterruptableHDFSClient.java
index 0301d5f..b6e9c46 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/InterruptableHDFSClient.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/hdfs/InterruptableHDFSClient.java
@@ -28,7 +28,6 @@ import java.util.List;
 import org.apache.hdt.core.hdfs.HDFSClient;
 import org.apache.hdt.core.hdfs.ResourceInformation;
 import org.apache.hdt.core.internal.model.HDFSServer;
-import org.apache.hdt.core.internal.model.ServerStatus;
 import org.apache.log4j.Logger;
 import org.eclipse.core.resources.IProject;
 import org.eclipse.core.resources.ResourcesPlugin;
@@ -41,9 +40,6 @@ import org.eclipse.core.resources.ResourcesPlugin;
 public class InterruptableHDFSClient extends HDFSClient {
 	private static final int DEFAULT_TIMEOUT = 5000;
 	private static final Logger logger = Logger.getLogger(InterruptableHDFSClient.class);
-	// private static ExecutorService threadPool =
-	// Executors.newFixedThreadPool(10);
-
 	private final HDFSClient client;
 	private final int timeoutMillis = DEFAULT_TIMEOUT;
 	private final String serverURI;
@@ -67,12 +63,17 @@ public class InterruptableHDFSClient extends HDFSClient {
 		final InterruptedException[] inE = new InterruptedException[1];
 		Thread runnerThread = new Thread(new Runnable() {
 			public void run() {
+     		   Thread current = Thread.currentThread();
+	      	   ClassLoader oldLoader = current.getContextClassLoader();
 				try {
+					current.setContextClassLoader(client.getClass().getClassLoader());
 					data.add(runnable.run());
 				} catch (IOException e) {
 					ioE[0] = e;
 				} catch (InterruptedException e) {
 					inE[0] = e;
+				}finally {
+				      current.setContextClassLoader(oldLoader);
 				}
 			}
 		});

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HDFSServer.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HDFSServer.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HDFSServer.java
index be04f74..0419f2a 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HDFSServer.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HDFSServer.java
@@ -33,6 +33,7 @@ import org.eclipse.emf.ecore.EObject;
  *   <li>{@link org.apache.hdt.core.internal.model.HDFSServer#getOperationURIs <em>Operation UR Is</em>}</li>
  *   <li>{@link org.apache.hdt.core.internal.model.HDFSServer#getUserId <em>User Id</em>}</li>
  *   <li>{@link org.apache.hdt.core.internal.model.HDFSServer#getGroupIds <em>Group Ids</em>}</li>
+ *   <li>{@link org.apache.hdt.core.internal.model.HDFSServer#getVersion <em>Version</em>}</li>
  * </ul>
  * </p>
  *
@@ -124,4 +125,31 @@ public interface HDFSServer extends Server {
 	 */
 	EList<String> getGroupIds();
 
+	/**
+	 * Returns the value of the '<em><b>Version</b></em>' attribute.
+	 * The default value is <code>"1.0.0.0"</code>.
+	 * <!-- begin-user-doc -->
+	 * <p>
+	 * If the meaning of the '<em>Version</em>' attribute isn't clear,
+	 * there really should be more of a description here...
+	 * </p>
+	 * <!-- end-user-doc -->
+	 * @return the value of the '<em>Version</em>' attribute.
+	 * @see #setVersion(String)
+	 * @see org.apache.hdt.core.internal.model.HadoopPackage#getHDFSServer_Version()
+	 * @model default="1.0.0.0"
+	 * @generated
+	 */
+	String getVersion();
+
+	/**
+	 * Sets the value of the '{@link org.apache.hdt.core.internal.model.HDFSServer#getVersion <em>Version</em>}' attribute.
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @param value the new value of the '<em>Version</em>' attribute.
+	 * @see #getVersion()
+	 * @generated
+	 */
+	void setVersion(String value);
+
 } // HDFSServer

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HadoopPackage.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HadoopPackage.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HadoopPackage.java
index 8332b4e..f2fd035 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HadoopPackage.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/HadoopPackage.java
@@ -228,13 +228,22 @@ public interface HadoopPackage extends EPackage {
 	int HDFS_SERVER__GROUP_IDS = SERVER_FEATURE_COUNT + 3;
 
 	/**
+	 * The feature id for the '<em><b>Version</b></em>' attribute.
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @generated
+	 * @ordered
+	 */
+	int HDFS_SERVER__VERSION = SERVER_FEATURE_COUNT + 4;
+
+	/**
 	 * The number of structural features of the '<em>HDFS Server</em>' class.
 	 * <!-- begin-user-doc -->
 	 * <!-- end-user-doc -->
 	 * @generated
 	 * @ordered
 	 */
-	int HDFS_SERVER_FEATURE_COUNT = SERVER_FEATURE_COUNT + 4;
+	int HDFS_SERVER_FEATURE_COUNT = SERVER_FEATURE_COUNT + 5;
 
 	/**
 	 * The meta object id for the '{@link org.apache.hdt.core.internal.model.impl.ServersImpl <em>Servers</em>}' class.
@@ -737,6 +746,17 @@ public interface HadoopPackage extends EPackage {
 	EAttribute getHDFSServer_GroupIds();
 
 	/**
+	 * Returns the meta object for the attribute '{@link org.apache.hdt.core.internal.model.HDFSServer#getVersion <em>Version</em>}'.
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @return the meta object for the attribute '<em>Version</em>'.
+	 * @see org.apache.hdt.core.internal.model.HDFSServer#getVersion()
+	 * @see #getHDFSServer()
+	 * @generated
+	 */
+	EAttribute getHDFSServer_Version();
+
+	/**
 	 * Returns the meta object for class '{@link org.apache.hdt.core.internal.model.Servers <em>Servers</em>}'.
 	 * <!-- begin-user-doc -->
 	 * <!-- end-user-doc -->
@@ -1126,6 +1146,14 @@ public interface HadoopPackage extends EPackage {
 		EAttribute HDFS_SERVER__GROUP_IDS = eINSTANCE.getHDFSServer_GroupIds();
 
 		/**
+		 * The meta object literal for the '<em><b>Version</b></em>' attribute feature.
+		 * <!-- begin-user-doc -->
+		 * <!-- end-user-doc -->
+		 * @generated
+		 */
+		EAttribute HDFS_SERVER__VERSION = eINSTANCE.getHDFSServer_Version();
+
+		/**
 		 * The meta object literal for the '{@link org.apache.hdt.core.internal.model.impl.ServersImpl <em>Servers</em>}' class.
 		 * <!-- begin-user-doc -->
 		 * <!-- end-user-doc -->

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HDFSServerImpl.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HDFSServerImpl.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HDFSServerImpl.java
index ed25f07..5cc260c 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HDFSServerImpl.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HDFSServerImpl.java
@@ -43,6 +43,7 @@ import org.eclipse.emf.ecore.util.EDataTypeUniqueEList;
  *   <li>{@link org.apache.hdt.core.internal.model.impl.HDFSServerImpl#getOperationURIs <em>Operation UR Is</em>}</li>
  *   <li>{@link org.apache.hdt.core.internal.model.impl.HDFSServerImpl#getUserId <em>User Id</em>}</li>
  *   <li>{@link org.apache.hdt.core.internal.model.impl.HDFSServerImpl#getGroupIds <em>Group Ids</em>}</li>
+ *   <li>{@link org.apache.hdt.core.internal.model.impl.HDFSServerImpl#getVersion <em>Version</em>}</li>
  * </ul>
  * </p>
  *
@@ -110,6 +111,26 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 	protected EList<String> groupIds;
 
 	/**
+	 * The default value of the '{@link #getVersion() <em>Version</em>}' attribute.
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @see #getVersion()
+	 * @generated
+	 * @ordered
+	 */
+	protected static final String VERSION_EDEFAULT = "1.0.0.0";
+
+	/**
+	 * The cached value of the '{@link #getVersion() <em>Version</em>}' attribute.
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @see #getVersion()
+	 * @generated
+	 * @ordered
+	 */
+	protected String version = VERSION_EDEFAULT;
+
+	/**
 	 * <!-- begin-user-doc -->
 	 * <!-- end-user-doc -->
 	 * @generated
@@ -199,6 +220,27 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 	 * <!-- end-user-doc -->
 	 * @generated
 	 */
+	public String getVersion() {
+		return version;
+	}
+
+	/**
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @generated
+	 */
+	public void setVersion(String newVersion) {
+		String oldVersion = version;
+		version = newVersion;
+		if (eNotificationRequired())
+			eNotify(new ENotificationImpl(this, Notification.SET, HadoopPackage.HDFS_SERVER__VERSION, oldVersion, version));
+	}
+
+	/**
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @generated
+	 */
 	@Override
 	public Object eGet(int featureID, boolean resolve, boolean coreType) {
 		switch (featureID) {
@@ -210,6 +252,8 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 				return getUserId();
 			case HadoopPackage.HDFS_SERVER__GROUP_IDS:
 				return getGroupIds();
+			case HadoopPackage.HDFS_SERVER__VERSION:
+				return getVersion();
 		}
 		return super.eGet(featureID, resolve, coreType);
 	}
@@ -237,6 +281,9 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 				getGroupIds().clear();
 				getGroupIds().addAll((Collection<? extends String>)newValue);
 				return;
+			case HadoopPackage.HDFS_SERVER__VERSION:
+				setVersion((String)newValue);
+				return;
 		}
 		super.eSet(featureID, newValue);
 	}
@@ -261,6 +308,9 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 			case HadoopPackage.HDFS_SERVER__GROUP_IDS:
 				getGroupIds().clear();
 				return;
+			case HadoopPackage.HDFS_SERVER__VERSION:
+				setVersion(VERSION_EDEFAULT);
+				return;
 		}
 		super.eUnset(featureID);
 	}
@@ -281,6 +331,8 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 				return USER_ID_EDEFAULT == null ? userId != null : !USER_ID_EDEFAULT.equals(userId);
 			case HadoopPackage.HDFS_SERVER__GROUP_IDS:
 				return groupIds != null && !groupIds.isEmpty();
+			case HadoopPackage.HDFS_SERVER__VERSION:
+				return VERSION_EDEFAULT == null ? version != null : !VERSION_EDEFAULT.equals(version);
 		}
 		return super.eIsSet(featureID);
 	}
@@ -303,6 +355,8 @@ public class HDFSServerImpl extends ServerImpl implements HDFSServer {
 		result.append(userId);
 		result.append(", groupIds: ");
 		result.append(groupIds);
+		result.append(", version: ");
+		result.append(version);
 		result.append(')');
 		return result.toString();
 	}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopFactoryImpl.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopFactoryImpl.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopFactoryImpl.java
index c3e5c2b..ac640c8 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopFactoryImpl.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopFactoryImpl.java
@@ -44,7 +44,7 @@ public class HadoopFactoryImpl extends EFactoryImpl implements HadoopFactory {
 	 */
 	public static HadoopFactory init() {
 		try {
-			HadoopFactory theHadoopFactory = (HadoopFactory)EPackage.Registry.INSTANCE.getEFactory("http://hadoop/1.0"); 
+			HadoopFactory theHadoopFactory = (HadoopFactory)EPackage.Registry.INSTANCE.getEFactory(HadoopPackage.eNS_URI);
 			if (theHadoopFactory != null) {
 				return theHadoopFactory;
 			}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopPackageImpl.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopPackageImpl.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopPackageImpl.java
index a698d56..c436729 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopPackageImpl.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/impl/HadoopPackageImpl.java
@@ -196,6 +196,15 @@ public class HadoopPackageImpl extends EPackageImpl implements HadoopPackage {
 	 * <!-- end-user-doc -->
 	 * @generated
 	 */
+	public EAttribute getHDFSServer_Version() {
+		return (EAttribute)hdfsServerEClass.getEStructuralFeatures().get(4);
+	}
+
+	/**
+	 * <!-- begin-user-doc -->
+	 * <!-- end-user-doc -->
+	 * @generated
+	 */
 	public EClass getServers() {
 		return serversEClass;
 	}
@@ -494,6 +503,7 @@ public class HadoopPackageImpl extends EPackageImpl implements HadoopPackage {
 		createEAttribute(hdfsServerEClass, HDFS_SERVER__OPERATION_UR_IS);
 		createEAttribute(hdfsServerEClass, HDFS_SERVER__USER_ID);
 		createEAttribute(hdfsServerEClass, HDFS_SERVER__GROUP_IDS);
+		createEAttribute(hdfsServerEClass, HDFS_SERVER__VERSION);
 
 		serversEClass = createEClass(SERVERS);
 		createEReference(serversEClass, SERVERS__HDFS_SERVERS);
@@ -570,6 +580,7 @@ public class HadoopPackageImpl extends EPackageImpl implements HadoopPackage {
 		initEAttribute(getHDFSServer_OperationURIs(), ecorePackage.getEString(), "operationURIs", null, 0, -1, HDFSServer.class, IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
 		initEAttribute(getHDFSServer_UserId(), ecorePackage.getEString(), "userId", null, 0, 1, HDFSServer.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
 		initEAttribute(getHDFSServer_GroupIds(), ecorePackage.getEString(), "groupIds", null, 0, -1, HDFSServer.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
+		initEAttribute(getHDFSServer_Version(), ecorePackage.getEString(), "version", "1.0.0.0", 0, 1, HDFSServer.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
 
 		initEClass(serversEClass, Servers.class, "Servers", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
 		initEReference(getServers_HdfsServers(), this.getHDFSServer(), null, "hdfsServers", null, 0, -1, Servers.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/util/HadoopSwitch.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/util/HadoopSwitch.java b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/util/HadoopSwitch.java
index 6f0b337..c56f87e 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/util/HadoopSwitch.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/internal/model/util/HadoopSwitch.java
@@ -24,6 +24,8 @@ import org.apache.hdt.core.internal.model.*;
 
 import org.eclipse.emf.ecore.EClass;
 import org.eclipse.emf.ecore.EObject;
+import org.eclipse.emf.ecore.EPackage;
+import org.eclipse.emf.ecore.util.Switch;
 
 /**
  * <!-- begin-user-doc -->
@@ -38,7 +40,7 @@ import org.eclipse.emf.ecore.EObject;
  * @see org.apache.hdt.core.internal.model.HadoopPackage
  * @generated
  */
-public class HadoopSwitch<T> {
+public class HadoopSwitch<T> extends Switch<T> {
 	/**
 	 * The cached model package
 	 * <!-- begin-user-doc -->
@@ -60,34 +62,16 @@ public class HadoopSwitch<T> {
 	}
 
 	/**
-	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
+	 * Checks whether this is a switch for the given package.
 	 * <!-- begin-user-doc -->
 	 * <!-- end-user-doc -->
-	 * @return the first non-null result returned by a <code>caseXXX</code> call.
+	 * @parameter ePackage the package in question.
+	 * @return whether this is a switch for the given package.
 	 * @generated
 	 */
-	public T doSwitch(EObject theEObject) {
-		return doSwitch(theEObject.eClass(), theEObject);
-	}
-
-	/**
-	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
-	 * <!-- begin-user-doc -->
-	 * <!-- end-user-doc -->
-	 * @return the first non-null result returned by a <code>caseXXX</code> call.
-	 * @generated
-	 */
-	protected T doSwitch(EClass theEClass, EObject theEObject) {
-		if (theEClass.eContainer() == modelPackage) {
-			return doSwitch(theEClass.getClassifierID(), theEObject);
-		}
-		else {
-			List<EClass> eSuperTypes = theEClass.getESuperTypes();
-			return
-				eSuperTypes.isEmpty() ?
-					defaultCase(theEObject) :
-					doSwitch(eSuperTypes.get(0), theEObject);
-		}
+	@Override
+	protected boolean isSwitchFor(EPackage ePackage) {
+		return ePackage == modelPackage;
 	}
 
 	/**
@@ -97,6 +81,7 @@ public class HadoopSwitch<T> {
 	 * @return the first non-null result returned by a <code>caseXXX</code> call.
 	 * @generated
 	 */
+	@Override
 	protected T doSwitch(int classifierID, EObject theEObject) {
 		switch (classifierID) {
 			case HadoopPackage.HDFS_SERVER: {
@@ -222,6 +207,7 @@ public class HadoopSwitch<T> {
 	 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
 	 * @generated
 	 */
+	@Override
 	public T defaultCase(EObject object) {
 		return null;
 	}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
index cd06f0e..47d00f4 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/launch/AbstractHadoopCluster.java
@@ -24,11 +24,17 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.Map.Entry;
 
+import org.apache.hdt.core.Activator;
+import org.apache.hdt.core.internal.HadoopManager;
+import org.apache.log4j.Logger;
 import org.eclipse.core.runtime.CoreException;
 import org.eclipse.core.runtime.IConfigurationElement;
 import org.eclipse.core.runtime.Platform;
+import org.eclipse.core.runtime.Status;
 
 public abstract class AbstractHadoopCluster {
+	
+	private static final Logger logger = Logger.getLogger(AbstractHadoopCluster.class);
 
 	abstract public String getLocationName();
 
@@ -44,14 +50,14 @@ public abstract class AbstractHadoopCluster {
 
 	abstract public void load(AbstractHadoopCluster server);
 
-	abstract public String getConfProp(String propName);
-
-	abstract public String getConfProp(ConfProp prop);
+	abstract public String getConfPropValue(String propName);
 
-	abstract public void setConfProp(ConfProp prop, String propValue);
+	abstract public String getConfPropValue(ConfProp prop);
 
-	abstract public void setConfProp(String propName, String propValue);
+	abstract public void setConfPropValue(ConfProp prop, String propValue);
 
+	abstract public void setConfPropValue(String propName, String propValue);
+	
 	abstract public Iterator<Entry<String, String>> getConfiguration();
 
 	abstract public void purgeJob(IHadoopJob job);
@@ -66,23 +72,42 @@ public abstract class AbstractHadoopCluster {
 	
 	abstract public boolean isAvailable() throws CoreException;
 	
+	abstract public String getVersion();
+	
 	public static AbstractHadoopCluster createCluster(File file) throws CoreException, IOException {
-		AbstractHadoopCluster hadoopCluster = createCluster();
+		AbstractHadoopCluster hadoopCluster = createCluster(ConfProp.PI_HADOOP_VERSION.defVal);
 		hadoopCluster.loadFromXML(file);
 		return hadoopCluster;
 	}
 
-	public static AbstractHadoopCluster createCluster() throws CoreException {
+	public static AbstractHadoopCluster createCluster(String hadoopVersion) throws CoreException {
+		logger.debug("Creating client for version "+hadoopVersion); 
 		IConfigurationElement[] elementsFor = Platform.getExtensionRegistry().getConfigurationElementsFor("org.apache.hdt.core.hadoopCluster");
-		return (AbstractHadoopCluster) elementsFor[0].createExecutableExtension("class");
+		for (IConfigurationElement configElement : elementsFor) {
+			String version = configElement.getAttribute("protocolVersion");
+			if(version.equalsIgnoreCase(hadoopVersion)){
+				return (AbstractHadoopCluster)configElement.createExecutableExtension("class");
+			}
+		}
+		throw new CoreException(new Status(Status.ERROR,Activator.BUNDLE_ID,"No clinet found for hadoop version "+hadoopVersion));
 	}
 
 	public static AbstractHadoopCluster createCluster(AbstractHadoopCluster existing) throws CoreException {
-		AbstractHadoopCluster hadoopCluster = createCluster();
+		AbstractHadoopCluster hadoopCluster = createCluster(existing.getVersion());
 		hadoopCluster.load(existing);
 		return hadoopCluster;
 	}
 
+	/**
+	 * @param propName
+	 * @return
+	 */
+	public ConfProp getConfPropForName(String propName) {
+		return ConfProp.getByName(propName);
+	}
 	
+	public String getConfPropName(ConfProp prop) {
+		return prop.name;
+	}
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java b/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
index c7c64f9..8b91dbe 100644
--- a/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/launch/ConfProp.java
@@ -26,6 +26,11 @@ public enum ConfProp {
 	 * Property name for the Hadoop location name
 	 */
 	PI_LOCATION_NAME(true, "location.name", "New Hadoop location"),
+	
+	/**
+	 * Property name for the Hadoop Version
+	 */
+	PI_HADOOP_VERSION(true, "hadoop.version", "1.1"),
 
 	/**
 	 * Property name for the master host name (the Job tracker)
@@ -116,8 +121,7 @@ public enum ConfProp {
 	public static ConfProp getByName(String propName) {
 		return map.get(propName);
 	}
-
-	public final String name;
+	protected  final String name;
 
 	public final String defVal;
 

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.feature/.classpath
----------------------------------------------------------------------
diff --git a/org.apache.hdt.feature/.classpath b/org.apache.hdt.feature/.classpath
index 4c2b7c4..39b5586 100644
--- a/org.apache.hdt.feature/.classpath
+++ b/org.apache.hdt.feature/.classpath
@@ -5,5 +5,6 @@
   <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
   <classpathentry kind="src" path="/org.apache.hdt.core"/>
   <classpathentry kind="src" path="/org.apache.hdt.hadoop.release"/>
+  <classpathentry kind="src" path="/org.apache.hdt.hadoop2.release"/>
   <classpathentry kind="src" path="/org.apache.hdt.ui"/>
-</classpath>
\ No newline at end of file
+</classpath>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.feature/.project
----------------------------------------------------------------------
diff --git a/org.apache.hdt.feature/.project b/org.apache.hdt.feature/.project
index 017e5f9..6aff5d5 100644
--- a/org.apache.hdt.feature/.project
+++ b/org.apache.hdt.feature/.project
@@ -5,6 +5,7 @@
 	<projects>
 		<project>org.apache.hdt.core</project>
 		<project>org.apache.hdt.hadoop.release</project>
+		<project>org.apache.hdt.hadoop2.release</project>
 		<project>org.apache.hdt.ui</project>
 	</projects>
 	<buildSpec>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.feature/feature.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.feature/feature.xml b/org.apache.hdt.feature/feature.xml
index 0f13637..120c73f 100644
--- a/org.apache.hdt.feature/feature.xml
+++ b/org.apache.hdt.feature/feature.xml
@@ -35,7 +35,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 or implied. See the License for the specific language governing
 permissions and limitations under the License.
    </license>
-
+   
    <plugin
          id="org.apache.hdt.hadoop.release"
          download-size="0"
@@ -45,6 +45,13 @@ permissions and limitations under the License.
         unpack="false"/>
 
    <plugin
+         id="org.apache.hdt.hadoop2.release"
+         download-size="0"
+         install-size="0"
+         version="0.0.2.qualifier"
+         unpack="false"/>
+
+   <plugin
          id="org.apache.hdt.ui"
          download-size="0"
          install-size="0"

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/META-INF/MANIFEST.MF b/org.apache.hdt.hadoop.release/META-INF/MANIFEST.MF
index db5e83c..ec6c80c 100644
--- a/org.apache.hdt.hadoop.release/META-INF/MANIFEST.MF
+++ b/org.apache.hdt.hadoop.release/META-INF/MANIFEST.MF
@@ -4,8 +4,13 @@ Bundle-Name: Apache Hadoop 0.0.1.qualifier Release Eclipse Plugin
 Bundle-SymbolicName: org.apache.hdt.hadoop.release;singleton:=true
 Bundle-Version: 0.0.2.qualifier
 Bundle-Vendor: Apache Hadoop
-Fragment-Host: org.apache.hdt.core
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Require-Bundle: org.apache.hdt.core,
+ org.eclipse.core.runtime,
+ org.eclipse.core.filesystem;bundle-version="1.3.0";visibility:=reexport,
+ org.eclipse.core.resources;bundle-version="3.6.0",
+ org.eclipse.swt,
+ org.eclipse.jface
 Bundle-ClassPath: .,
  jars/zookeeper-3.4.5.jar,
  jars/slf4j-log4j12-1.6.1.jar,

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/build.properties
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/build.properties b/org.apache.hdt.hadoop.release/build.properties
index 4c1d15a..848ab4a 100644
--- a/org.apache.hdt.hadoop.release/build.properties
+++ b/org.apache.hdt.hadoop.release/build.properties
@@ -19,5 +19,5 @@ source.. = src/
 output.. = bin/
 bin.includes = META-INF/,\
                .,\
-               fragment.xml,\
+               plugin.xml,\
                jars/

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/fragment.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/fragment.xml b/org.apache.hdt.hadoop.release/fragment.xml
deleted file mode 100644
index 729d38f..0000000
--- a/org.apache.hdt.hadoop.release/fragment.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<?eclipse version="3.4"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<fragment>
-   <extension
-         point="org.apache.hdt.core.hdfsClient">
-      <hdfsClient
-            class="org.apache.hdt.hadoop.release.HDFSClientRelease"
-            protocol="hdfs"
-            protocolVersion="1.1.2.21">
-      </hdfsClient>
-   </extension>
-   <extension
-         point="org.apache.hdt.core.zookeeperClient">
-      <zookeeperClient
-            class="org.apache.hdt.hadoop.release.ZooKeeperClientRelease"
-            protocolVersion="3.4.5">
-      </zookeeperClient>
-   </extension>
-   <extension
-         point="org.apache.hdt.core.hadoopCluster">
-      <hadoopCluster
-            class="org.apache.hdt.hadoop.release.HadoopCluster"
-            protocolVersion="1.1">
-      </hadoopCluster>
-   </extension>
-
-</fragment>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/plugin.xml b/org.apache.hdt.hadoop.release/plugin.xml
new file mode 100644
index 0000000..476bdcd
--- /dev/null
+++ b/org.apache.hdt.hadoop.release/plugin.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?eclipse version="3.4"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<plugin>
+   <extension
+         point="org.apache.hdt.core.hdfsClient">
+      <hdfsClient
+            class="org.apache.hdt.hadoop.release.HDFSClientRelease"
+            protocol="hdfs"
+            protocolVersion="1.1">
+      </hdfsClient>
+   </extension>
+   <extension
+         point="org.apache.hdt.core.zookeeperClient">
+      <zookeeperClient
+            class="org.apache.hdt.hadoop.release.ZooKeeperClientRelease"
+            protocolVersion="3.4.5">
+      </zookeeperClient>
+   </extension>
+   <extension
+         point="org.apache.hdt.core.hadoopCluster">
+      <hadoopCluster
+            class="org.apache.hdt.hadoop.release.HadoopCluster"
+            protocolVersion="1.1">
+      </hadoopCluster>
+   </extension>
+
+</plugin>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
index 67fcb75..0014bb6 100644
--- a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
+++ b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopCluster.java
@@ -28,6 +28,7 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.ServiceLoader;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
@@ -37,7 +38,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 import java.util.logging.Logger;
 
 import javax.xml.parsers.DocumentBuilder;
@@ -53,13 +53,11 @@ import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hdt.core.Activator;
-import org.apache.hdt.core.launch.ConfProp;
 import org.apache.hdt.core.launch.AbstractHadoopCluster;
+import org.apache.hdt.core.launch.ConfProp;
 import org.apache.hdt.core.launch.IHadoopJob;
 import org.apache.hdt.core.launch.IJarModule;
 import org.apache.hdt.core.launch.IJobListener;
-import org.eclipse.core.internal.utils.FileUtil;
-import org.eclipse.core.resources.WorkspaceJob;
 import org.eclipse.core.runtime.CoreException;
 import org.eclipse.core.runtime.IProgressMonitor;
 import org.eclipse.core.runtime.IStatus;
@@ -128,16 +126,18 @@ public class HadoopCluster extends AbstractHadoopCluster {
 							+ HadoopCluster.this.getLocationName(), ioe);
 				}
 			}
-
+			Thread current = Thread.currentThread();
+			ClassLoader oldLoader = current.getContextClassLoader();
 			try {
+				current.setContextClassLoader(HadoopCluster.class.getClassLoader());
 				// Set of all known existing Job IDs we want fresh info of
 				Set<JobID> missingJobIds = new HashSet<JobID>(runningJobs.keySet());
 
 				JobStatus[] jstatus = client.jobsToComplete();
 				jstatus = jstatus == null ? new JobStatus[0] : jstatus;
-				for (JobStatus status : jstatus) {
+				for (final JobStatus status : jstatus) {
 
-					JobID jobId = status.getJobID();
+					final JobID jobId = status.getJobID();
 					missingJobIds.remove(jobId);
 
 					HadoopJob hJob;
@@ -145,7 +145,11 @@ public class HadoopCluster extends AbstractHadoopCluster {
 						hJob = runningJobs.get(jobId);
 						if (hJob == null) {
 							// Unknown job, create an entry
-							RunningJob running = client.getJob(jobId);
+							final RunningJob running = client.getJob(jobId);
+							ServiceLoader<FileSystem> serviceLoader = ServiceLoader.load(FileSystem.class);
+					        for (FileSystem fs : serviceLoader) {
+					        	System.out.println(fs.getClass().getProtectionDomain().getCodeSource().getLocation());
+					        }
 							hJob = new HadoopJob(HadoopCluster.this, jobId, running, status);
 							newJob(hJob);
 						}
@@ -166,7 +170,9 @@ public class HadoopCluster extends AbstractHadoopCluster {
 				client = null;
 				return new Status(Status.ERROR, Activator.BUNDLE_ID, 0, "Cannot retrieve running Jobs on location: " + HadoopCluster.this.getLocationName(),
 						ioe);
-			}
+			}finally {
+                current.setContextClassLoader(oldLoader);
+             }
 
 			// Schedule the next observation
 			schedule(STATUS_OBSERVATION_DELAY);
@@ -321,8 +327,8 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	 *            the configuration property
 	 * @return the property value
 	 */
-	public String getConfProp(ConfProp prop) {
-		return conf.get(prop.name);
+	public String getConfPropValue(ConfProp prop) {
+		return conf.get(getConfPropName(prop));
 	}
 
 	/**
@@ -332,12 +338,12 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	 *            the property name
 	 * @return the property value
 	 */
-	public String getConfProp(String propName) {
+	public String getConfPropValue(String propName) {
 		return this.conf.get(propName);
 	}
 
 	public String getLocationName() {
-		return getConfProp(ConfProp.PI_LOCATION_NAME);
+		return getConfPropValue(ConfProp.PI_LOCATION_NAME);
 	}
 
 	/**
@@ -346,7 +352,7 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	 * @return the host name of the Job tracker
 	 */
 	public String getMasterHostName() {
-		return getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+		return getConfPropValue(ConfProp.PI_JOB_TRACKER_HOST);
 	}
 
 	public String getState() {
@@ -432,25 +438,18 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	 * @param propvalue
 	 *            the property value
 	 */
-	public void setConfProp(ConfProp prop, String propValue) {
+	public void setConfPropValue(ConfProp prop, String propValue) {
 		if (propValue != null)
-			conf.set(prop.name, propValue);
+			setConfPropValue(getConfPropName(prop), propValue);
 	}
 
-	/**
-	 * Sets a Hadoop configuration property value
-	 * 
-	 * @param propName
-	 *            the property name
-	 * @param propValue
-	 *            the property value
-	 */
-	public void setConfProp(String propName, String propValue) {
-		this.conf.set(propName, propValue);
+	@Override
+	public void setConfPropValue(String propName, String propValue) {
+		conf.set(propName, propValue);
 	}
-
+	
 	public void setLocationName(String newName) {
-		setConfProp(ConfProp.PI_LOCATION_NAME, newName);
+		setConfPropValue(ConfProp.PI_LOCATION_NAME, newName);
 	}
 
 	/**
@@ -483,7 +482,7 @@ public class HadoopCluster extends AbstractHadoopCluster {
 	 */
 	private void addPluginConfigDefaultProperties() {
 		for (ConfProp prop : ConfProp.values()) {
-			conf.set(prop.name, prop.defVal);
+			conf.set(getConfPropName(prop), prop.defVal);
 		}
 	}
 
@@ -599,4 +598,12 @@ public class HadoopCluster extends AbstractHadoopCluster {
 					Activator.BUNDLE_ID, "unable to connect to server", e));
 		}
 	}
+
+	/* (non-Javadoc)
+	 * @see org.apache.hdt.core.launch.AbstractHadoopCluster#getVersion()
+	 */
+	@Override
+	public String getVersion() {
+		return "1.1";
+	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopJob.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopJob.java b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopJob.java
index 5861967..9200674 100644
--- a/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopJob.java
+++ b/org.apache.hdt.hadoop.release/src/org/apache/hdt/hadoop/release/HadoopJob.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hdt.core.launch.AbstractHadoopCluster;
 import org.apache.hdt.core.launch.IHadoopJob;
+import org.eclipse.core.runtime.internal.adaptor.ContextFinder;
 
 /**
  * Representation of a Map/Reduce running job on a given location
@@ -125,6 +126,7 @@ public class HadoopJob implements IHadoopJob {
 	 * @param status
 	 */
 	public HadoopJob(HadoopCluster location, JobID id, RunningJob running, JobStatus status) {
+		//HadoopCluster.updateCurrentClassLoader();
 
 		this.location = location;
 		this.jobId = id;

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/.classpath
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/.classpath b/org.apache.hdt.hadoop2.release/.classpath
new file mode 100644
index 0000000..d59ac75
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/.classpath
@@ -0,0 +1,91 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+	<classpathentry kind="src" path="src/"/>
+	<classpathentry exported="true" kind="lib" path="jars/activation-1.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/aopalliance-1.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/asm-3.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/avro-1.7.4.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-beanutils-1.7.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-beanutils-core-1.8.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-cli-1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-codec-1.4.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-collections-3.2.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-compress-1.4.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-configuration-1.6.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-digester-1.8.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-el-1.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-httpclient-3.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-io-2.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-lang-2.4.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-logging-1.1.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-math-2.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/commons-net-3.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/gmbal-api-only-3.0.0-b023.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/grizzly-framework-2.1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/grizzly-http-2.1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/grizzly-http-server-2.1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/grizzly-http-servlet-2.1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/grizzly-rcm-2.1.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/guava-11.0.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/guice-3.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/guice-servlet-3.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-annotations-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-auth-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-client-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-common-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-hdfs-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-mapreduce-client-app-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-mapreduce-client-common-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-mapreduce-client-core-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-mapreduce-client-jobclient-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-mapreduce-client-shuffle-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-yarn-api-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-yarn-client-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-yarn-common-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-yarn-server-common-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hadoop-yarn-server-tests-2.2.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/hamcrest-core-1.3.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jackson-core-asl-1.8.8.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jackson-jaxrs-1.8.3.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jackson-mapper-asl-1.8.8.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jackson-xc-1.8.3.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jasper-compiler-5.5.23.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jasper-runtime-5.5.23.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/javax.inject-1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/javax.servlet-3.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/javax.servlet-api-3.0.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jaxb-api-2.2.2.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jaxb-impl-2.2.3-1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-client-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-core-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-grizzly2-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-guice-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-json-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-server-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-test-framework-core-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jersey-test-framework-grizzly2-1.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jets3t-0.6.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jettison-1.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jetty-6.1.26.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jetty-util-6.1.26.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jsch-0.1.42.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jsp-api-2.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/jsr305-1.3.9.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/junit-4.11.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/log4j-1.2.15.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/management-api-3.0.0-b012.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/netty-3.6.2.Final.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/paranamer-2.3.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/protobuf-java-2.5.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/servlet-api-2.5.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/slf4j-api-1.6.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/slf4j-log4j12-1.6.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/snappy-java-1.0.4.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/stax-api-1.0.1.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/xmlenc-0.52.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/xz-1.0.jar"/>
+	<classpathentry exported="true" kind="lib" path="jars/zookeeper-3.4.5.jar"/>
+	<classpathentry kind="output" path="target/classes"/>
+</classpath>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/.settings/org.eclipse.core.resources.prefs b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..99f26c0
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+encoding/<project>=UTF-8

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/.settings/org.eclipse.m2e.core.prefs b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.m2e.core.prefs
new file mode 100644
index 0000000..f897a7f
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/.settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,4 @@
+activeProfiles=
+eclipse.preferences.version=1
+resolveWorkspaceProjects=true
+version=1

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/META-INF/MANIFEST.MF b/org.apache.hdt.hadoop2.release/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..eb51451
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/META-INF/MANIFEST.MF
@@ -0,0 +1,98 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Apache Hadoop2 Release Eclipse Plugin
+Bundle-SymbolicName: org.apache.hdt.hadoop2.release;singleton:=true
+Bundle-Version: 0.0.2.qualifier
+Bundle-Vendor: Apache Hadoop
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Require-Bundle: org.apache.hdt.core,
+ org.eclipse.core.runtime,
+ org.eclipse.core.filesystem;bundle-version="1.3.0";visibility:=reexport,
+ org.eclipse.core.resources;bundle-version="3.6.0",
+ org.eclipse.swt,
+ org.eclipse.jface
+Bundle-ClassPath: .,
+ jars/activation-1.1.jar,
+ jars/aopalliance-1.0.jar,
+ jars/asm-3.1.jar,
+ jars/avro-1.7.4.jar,
+ jars/commons-beanutils-1.7.0.jar,
+ jars/commons-beanutils-core-1.8.0.jar,
+ jars/commons-cli-1.2.jar,
+ jars/commons-codec-1.4.jar,
+ jars/commons-collections-3.2.1.jar,
+ jars/commons-compress-1.4.1.jar,
+ jars/commons-configuration-1.6.jar,
+ jars/commons-digester-1.8.jar,
+ jars/commons-el-1.0.jar,
+ jars/commons-httpclient-3.1.jar,
+ jars/commons-io-2.1.jar,
+ jars/commons-lang-2.4.jar,
+ jars/commons-logging-1.1.1.jar,
+ jars/commons-math-2.1.jar,
+ jars/commons-net-3.1.jar,
+ jars/gmbal-api-only-3.0.0-b023.jar,
+ jars/grizzly-framework-2.1.2.jar,
+ jars/grizzly-http-2.1.2.jar,
+ jars/grizzly-http-server-2.1.2.jar,
+ jars/grizzly-http-servlet-2.1.2.jar,
+ jars/grizzly-rcm-2.1.2.jar,
+ jars/guava-11.0.2.jar,
+ jars/guice-3.0.jar,
+ jars/guice-servlet-3.0.jar,
+ jars/hadoop-annotations-2.2.0.jar,
+ jars/hadoop-auth-2.2.0.jar,
+ jars/hadoop-client-2.2.0.jar,
+ jars/hadoop-common-2.2.0.jar,
+ jars/hadoop-hdfs-2.2.0.jar,
+ jars/hadoop-mapreduce-client-app-2.2.0.jar,
+ jars/hadoop-mapreduce-client-common-2.2.0.jar,
+ jars/hadoop-mapreduce-client-core-2.2.0.jar,
+ jars/hadoop-mapreduce-client-jobclient-2.2.0.jar,
+ jars/hadoop-mapreduce-client-shuffle-2.2.0.jar,
+ jars/hadoop-yarn-api-2.2.0.jar,
+ jars/hadoop-yarn-client-2.2.0.jar,
+ jars/hadoop-yarn-common-2.2.0.jar,
+ jars/hadoop-yarn-server-common-2.2.0.jar,
+ jars/hadoop-yarn-server-tests-2.2.0.jar,
+ jars/hamcrest-core-1.3.jar,
+ jars/jackson-core-asl-1.8.8.jar,
+ jars/jackson-jaxrs-1.8.3.jar,
+ jars/jackson-mapper-asl-1.8.8.jar,
+ jars/jackson-xc-1.8.3.jar,
+ jars/jasper-compiler-5.5.23.jar,
+ jars/jasper-runtime-5.5.23.jar,
+ jars/javax.inject-1.jar,
+ jars/javax.servlet-3.1.jar,
+ jars/javax.servlet-api-3.0.1.jar,
+ jars/jaxb-api-2.2.2.jar,
+ jars/jaxb-impl-2.2.3-1.jar,
+ jars/jersey-client-1.9.jar,
+ jars/jersey-core-1.9.jar,
+ jars/jersey-grizzly2-1.9.jar,
+ jars/jersey-guice-1.9.jar,
+ jars/jersey-json-1.9.jar,
+ jars/jersey-server-1.9.jar,
+ jars/jersey-test-framework-core-1.9.jar,
+ jars/jersey-test-framework-grizzly2-1.9.jar,
+ jars/jets3t-0.6.1.jar,
+ jars/jettison-1.1.jar,
+ jars/jetty-6.1.26.jar,
+ jars/jetty-util-6.1.26.jar,
+ jars/jsch-0.1.42.jar,
+ jars/jsp-api-2.1.jar,
+ jars/jsr305-1.3.9.jar,
+ jars/junit-4.11.jar,
+ jars/log4j-1.2.15.jar,
+ jars/management-api-3.0.0-b012.jar,
+ jars/netty-3.6.2.Final.jar,
+ jars/paranamer-2.3.jar,
+ jars/protobuf-java-2.5.0.jar,
+ jars/servlet-api-2.5.jar,
+ jars/slf4j-api-1.6.1.jar,
+ jars/slf4j-log4j12-1.6.1.jar,
+ jars/snappy-java-1.0.4.1.jar,
+ jars/stax-api-1.0.1.jar,
+ jars/xmlenc-0.52.jar,
+ jars/xz-1.0.jar,
+ jars/zookeeper-3.4.5.jar

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/build.properties
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/build.properties b/org.apache.hdt.hadoop2.release/build.properties
new file mode 100644
index 0000000..848ab4a
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/build.properties
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .,\
+               plugin.xml,\
+               jars/

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/plugin.xml b/org.apache.hdt.hadoop2.release/plugin.xml
new file mode 100644
index 0000000..b200aca
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/plugin.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?eclipse version="3.4"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<plugin>
+<extension
+         point="org.apache.hdt.core.hadoopCluster" >
+      <hadoopCluster
+            class="org.apache.hdt.hadoop2.release.HadoopCluster"
+            protocolVersion="2.2">
+      </hadoopCluster>
+   </extension>
+   <extension
+         point="org.apache.hdt.core.hdfsClient">
+      <hdfsClient
+            class="org.apache.hdt.hadoop2.release.HDFSClientRelease"
+            protocol="hdfs"
+            protocolVersion="2.2">
+      </hdfsClient>
+   </extension>
+</plugin>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/pom.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/pom.xml b/org.apache.hdt.hadoop2.release/pom.xml
new file mode 100644
index 0000000..249ad6e
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/pom.xml
@@ -0,0 +1,127 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <relativePath>../pom.xml</relativePath>
+    <groupId>org.apache.hdt</groupId>
+    <artifactId>hdt.master</artifactId>
+    <version>0.0.2-SNAPSHOT</version>
+  </parent>
+  <artifactId>org.apache.hdt.hadoop2.release</artifactId>
+  <packaging>eclipse-plugin</packaging>
+  <name>Apache Hadoop2 Development Tools Assembly</name>
+  
+  <properties>
+    <hadoop2.version>2.2.0</hadoop2.version>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop2.version}</version>
+    </dependency>
+  </dependencies>
+  
+  <build>
+    <sourceDirectory>src</sourceDirectory>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>initialize</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <excludeScope>system</excludeScope>
+              <outputDirectory>${basedir}/jars</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <phase>initialize</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>log4j</groupId>
+                  <artifactId>log4j</artifactId>
+                  <overWrite>false</overWrite>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.apache.hadoop</groupId>
+                  <artifactId>hadoop-yarn-server-tests</artifactId>
+                  <version>${hadoop2.version}</version>
+                  <overWrite>false</overWrite>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.apache.zookeeper</groupId>
+                  <artifactId>zookeeper</artifactId>
+                  <overWrite>false</overWrite>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.slf4j</groupId>
+                  <artifactId>slf4j-api</artifactId>
+                  <overWrite>false</overWrite>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.slf4j</groupId>
+                  <artifactId>slf4j-log4j12</artifactId>
+                  <overWrite>false</overWrite>
+                </artifactItem>
+              </artifactItems>
+              <outputDirectory>${basedir}/jars</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/34799cec/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HDFSClientRelease.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HDFSClientRelease.java b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HDFSClientRelease.java
new file mode 100644
index 0000000..72874da
--- /dev/null
+++ b/org.apache.hdt.hadoop2.release/src/org/apache/hdt/hadoop2/release/HDFSClientRelease.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hdt.hadoop2.release;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hdt.core.hdfs.ResourceInformation;
+import org.apache.log4j.Logger;
+
+/**
+ * HDFS Client for HDFS version 2.2.0.
+ * 
+ * @author Srimanth Gunturi
+ */
+public class HDFSClientRelease extends org.apache.hdt.core.hdfs.HDFSClient {
+
+	private static Logger logger = Logger.getLogger(HDFSClientRelease.class);
+	private Configuration config;
+
+	public HDFSClientRelease() {
+		config = new Configuration();
+	}
+
+	private ResourceInformation getResourceInformation(FileStatus fileStatus) {
+		ResourceInformation fi = new ResourceInformation();
+		fi.setFolder(fileStatus.isDir());
+		fi.setGroup(fileStatus.getGroup());
+		fi.setLastAccessedTime(fileStatus.getAccessTime());
+		fi.setLastModifiedTime(fileStatus.getAccessTime());
+		fi.setName(fileStatus.getPath().getName());
+		fi.setOwner(fileStatus.getOwner());
+		fi.setPath(fileStatus.getPath().getParent() == null ? "/" : fileStatus.getPath().getParent().toString());
+		fi.setReplicationFactor(fileStatus.getReplication());
+		fi.setSize(fileStatus.getLen());
+		FsPermission fsPermission = fileStatus.getPermission();
+		updatePermissions(fi.getUserPermissions(), fsPermission.getUserAction());
+		updatePermissions(fi.getGroupPermissions(), fsPermission.getGroupAction());
+		updatePermissions(fi.getOtherPermissions(), fsPermission.getOtherAction());
+		return fi;
+	}
+
+	private void updatePermissions(ResourceInformation.Permissions permissions, FsAction action) {
+		permissions.read = action.implies(FsAction.READ);
+		permissions.write = action.implies(FsAction.WRITE);
+		permissions.execute = action.implies(FsAction.EXECUTE);
+	}
+	
+	protected FileSystem createFS(URI uri, String user) throws IOException, InterruptedException{
+		if(user==null)
+			return FileSystem.get(uri, config);
+		return FileSystem.get(uri, config, user);
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see org.apache.hdt.core.hdfs.HDFSClient#getResource(java.net.URI)
+	 */
+	@Override
+	public ResourceInformation getResourceInformation(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		FileStatus fileStatus = null;
+		ResourceInformation fi = null;
+		try {
+			fileStatus = fs.getFileStatus(path);
+			fi = getResourceInformation(fileStatus);
+		} catch (FileNotFoundException fne) {
+			logger.info(fne.getMessage());
+			logger.debug(fne.getMessage(), fne);
+		}
+		return fi;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see org.apache.hdt.core.hdfs.HDFSClient#setResource(java.net.URI,
+	 * org.apache.hdt.core.hdfs.ResourceInformation)
+	 */
+	@Override
+	public void setResourceInformation(URI uri, ResourceInformation information, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		if (!information.isFolder()) {
+			fs.setTimes(path, information.getLastModifiedTime(), information.getLastAccessedTime());
+		}
+		if (information.getOwner() != null || information.getGroup() != null)
+			fs.setOwner(path, information.getOwner(), information.getGroup());
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.apache.hdt.core.hdfs.HDFSClient#listResources(java.net.URI)
+	 */
+	@Override
+	public List<ResourceInformation> listResources(URI uri, String user) throws IOException, InterruptedException {
+		List<ResourceInformation> ris = null;
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		FileStatus[] listStatus = fs.listStatus(path);
+		if (listStatus != null) {
+			ris = new ArrayList<ResourceInformation>();
+			for (FileStatus ls : listStatus) {
+				ris.add(getResourceInformation(ls));
+			}
+		}
+		return ris;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.apache.hdt.core.hdfs.HDFSClient#openInputStream(java.net.URI,
+	 * org.eclipse.core.runtime.IProgressMonitor)
+	 */
+	@Override
+	public InputStream openInputStream(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		FSDataInputStream open = fs.open(path);
+		return open;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.apache.hdt.core.hdfs.HDFSClient#openInputStream(java.net.URI,
+	 * org.eclipse.core.runtime.IProgressMonitor)
+	 */
+	@Override
+	public OutputStream createOutputStream(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		FSDataOutputStream outputStream = fs.create(path);
+		return outputStream;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.apache.hdt.core.hdfs.HDFSClient#openInputStream(java.net.URI,
+	 * org.eclipse.core.runtime.IProgressMonitor)
+	 */
+	@Override
+	public OutputStream openOutputStream(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		// TODO. Temporary fix till Issue#3 is fixed.
+		FSDataOutputStream outputStream = fs.create(path);
+		return outputStream;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see org.apache.hdt.core.hdfs.HDFSClient#mkdirs(java.net.URI,
+	 * org.eclipse.core.runtime.IProgressMonitor)
+	 */
+	@Override
+	public boolean mkdirs(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		return fs.mkdirs(path);
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see org.apache.hdt.core.hdfs.HDFSClient#delete(java.net.URI,
+	 * org.eclipse.core.runtime.IProgressMonitor)
+	 */
+	@Override
+	public void delete(URI uri, String user) throws IOException, InterruptedException {
+		FileSystem fs = createFS(uri, user);
+		Path path = new Path(uri.getPath());
+		fs.delete(path, true);
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see
+	 * org.apache.hdt.core.hdfs.HDFSClient#getDefaultUserAndGroupIds()
+	 */
+	@Override
+	public List<String> getDefaultUserAndGroupIds() throws IOException {
+		List<String> idList = new ArrayList<String>();
+		UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
+		idList.add(currentUser.getShortUserName());
+		String[] groupIds = currentUser.getGroupNames();
+		if (groupIds != null) {
+			for (String groupId : groupIds) {
+				idList.add(groupId);
+			}
+		}
+		return idList;
+	}
+
+}


Mime
View raw message