hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ta...@apache.org
Subject svn commit: r588310 [2/4] - in /lucene/hadoop/trunk/src/contrib/eclipse-plugin: ./ .settings/ META-INF/ resources/ resources/Components/ resources/Old/ src/java/org/apache/hadoop/eclipse/ src/java/org/apache/hadoop/eclipse/actions/ src/java/org/apache/...
Date Thu, 25 Oct 2007 18:58:39 GMT
Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java Thu Oct 25 11:58:32 2007
@@ -1,38 +1,43 @@
-package org.apache.hadoop.eclipse.actions;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-import java.io.IOException;
+package org.apache.hadoop.eclipse.actions;
 
-import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ImageLibrary;
 import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.servers.DefineHadoopServerLocWizardPage;
+import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
 import org.apache.hadoop.eclipse.view.servers.ServerView;
-import org.eclipse.core.runtime.FileLocator;
-import org.eclipse.core.runtime.Path;
 import org.eclipse.jface.action.Action;
-import org.eclipse.jface.resource.ImageDescriptor;
 import org.eclipse.jface.wizard.Wizard;
 import org.eclipse.jface.wizard.WizardDialog;
 
 /**
  * Editing server properties action
  */
-public class EditServerAction extends Action {
+public class EditLocationAction extends Action {
 
   private ServerView serverView;
 
-  public EditServerAction(ServerView serverView) {
+  public EditLocationAction(ServerView serverView) {
     this.serverView = serverView;
 
-    setText("Edit Hadoop Server");
-    try {
-      // TODO Edit server icon
-      setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
-          .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
-              new Path("resources/hadoop_small.gif"), null)))));
-    } catch (IOException e) {
-      /* Ignore if no image */
-      e.printStackTrace();
-    }
+    setText("Edit Hadoop location...");
+    setImageDescriptor(ImageLibrary.get("server.view.action.location.edit"));
   }
 
   @Override
@@ -43,19 +48,19 @@
       return;
 
     WizardDialog dialog = new WizardDialog(null, new Wizard() {
-      private DefineHadoopServerLocWizardPage page =
-          new DefineHadoopServerLocWizardPage(server);
+      private HadoopLocationWizard page = new HadoopLocationWizard(server);
 
       @Override
       public void addPages() {
         super.addPages();
-        setWindowTitle("Edit Hadoop Server Location");
+        setWindowTitle("Edit Hadoop location...");
         addPage(page);
       }
 
       @Override
       public boolean performFinish() {
-        return (page.performFinish() != null);
+        page.performFinish();
+        return true;
       }
     });
 

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java Thu Oct 25 11:58:32 2007
@@ -18,14 +18,9 @@
 
 package org.apache.hadoop.eclipse.actions;
 
-import java.io.IOException;
-
-import org.apache.hadoop.eclipse.Activator;
-import org.apache.hadoop.eclipse.servers.DefineHadoopServerLocWizardPage;
-import org.eclipse.core.runtime.FileLocator;
-import org.eclipse.core.runtime.Path;
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.servers.HadoopLocationWizard;
 import org.eclipse.jface.action.Action;
-import org.eclipse.jface.resource.ImageDescriptor;
 import org.eclipse.jface.wizard.Wizard;
 import org.eclipse.jface.wizard.WizardDialog;
 
@@ -34,35 +29,28 @@
  * Action corresponding to creating a new MapReduce Server.
  */
 
-public class NewServerAction extends Action {
-  public NewServerAction() {
-    setText("New Hadoop Server");
-    try {
-      // TODO decorate with + sign to indicate create
-      setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
-          .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
-              new Path("resources/hadoop_small.gif"), null)))));
-    } catch (IOException e) {
-      /* Ignore if no image */
-      e.printStackTrace();
-    }
+public class NewLocationAction extends Action {
+  public NewLocationAction() {
+    setText("New Hadoop location...");
+    setImageDescriptor(ImageLibrary.get("server.view.action.location.new"));
   }
 
   @Override
   public void run() {
     WizardDialog dialog = new WizardDialog(null, new Wizard() {
-      private DefineHadoopServerLocWizardPage page = new DefineHadoopServerLocWizardPage();
+      private HadoopLocationWizard page = new HadoopLocationWizard();
 
       @Override
       public void addPages() {
         super.addPages();
-        setWindowTitle("New Hadoop Server Location");
+        setWindowTitle("New Hadoop location...");
         addPage(page);
       }
 
       @Override
       public boolean performFinish() {
-        return page.performFinish() != null;
+        page.performFinish();
+        return true;
       }
 
     });

Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java Thu Oct 25 11:58:32 2007
@@ -27,9 +27,8 @@
 import org.eclipse.ui.IWorkbench;
 import org.eclipse.ui.PlatformUI;
 
-
 /**
- * Action to open a new MapReduce project.
+ * Action to open a new Map/Reduce project.
  */
 
 public class OpenNewMRProjectAction extends Action {

Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java Thu Oct 25 11:58:32 2007
@@ -18,23 +18,20 @@
 
 package org.apache.hadoop.eclipse.dfs;
 
-import java.util.HashMap;
-import java.util.Map;
-
+import org.apache.hadoop.eclipse.ImageLibrary;
+import org.apache.hadoop.eclipse.actions.DFSActionImpl;
 import org.eclipse.jface.action.Action;
 import org.eclipse.jface.action.IMenuManager;
 import org.eclipse.jface.resource.ImageDescriptor;
 import org.eclipse.jface.viewers.ISelection;
 import org.eclipse.jface.viewers.IStructuredSelection;
 import org.eclipse.ui.IActionBars;
-import org.eclipse.ui.ISharedImages;
 import org.eclipse.ui.PlatformUI;
 import org.eclipse.ui.actions.ActionFactory;
 import org.eclipse.ui.navigator.CommonActionProvider;
 import org.eclipse.ui.navigator.ICommonActionConstants;
 import org.eclipse.ui.navigator.ICommonActionExtensionSite;
 import org.eclipse.ui.navigator.ICommonMenuConstants;
-import org.eclipse.ui.plugin.AbstractUIPlugin;
 
 /**
  * Allows the user to delete and refresh items in the DFS tree
@@ -42,10 +39,7 @@
 
 public class ActionProvider extends CommonActionProvider {
 
-  private ICommonActionExtensionSite site;
-
-  private Map<String, ImageDescriptor> descriptors =
-      new HashMap<String, ImageDescriptor>();
+  private static ICommonActionExtensionSite site;
 
   public ActionProvider() {
   }
@@ -53,38 +47,34 @@
   /* @inheritDoc */
   @Override
   public void init(ICommonActionExtensionSite site) {
+    if (ActionProvider.site != null) {
+      System.err.printf("%s: Multiple init()\n", this.getClass()
+          .getCanonicalName());
+      return;
+    }
     super.init(site);
-    this.site = site;
-
-    descriptors
-        .put("dfs.delete", PlatformUI.getWorkbench().getSharedImages()
-            .getImageDescriptor(ISharedImages.IMG_TOOL_DELETE));
-    descriptors.put("dfs.refresh", AbstractUIPlugin
-        .imageDescriptorFromPlugin("org.eclipse.core.tools.resources",
-            "icons/refresh.gif"));
-    // NOTE(jz)
-    // pretty brittle, but worst case no image
-    // descriptors.put("dfs.put",
-    // NavigatorPlugin.imageDescriptorFromPlugin("org.eclipse.core.tools.resources",
-    // "icons/refresh.gif"));
+    ActionProvider.site = site;
   }
 
   /* @inheritDoc */
   @Override
   public void fillActionBars(IActionBars actionBars) {
     actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
-        new DfsAction("dfs.delete", "Delete"));
+        new DFSAction(DFSActions.DELETE));
     actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
-        new DfsAction("dfs.refresh", "Refresh"));
+        new DFSAction(DFSActions.REFRESH));
+
+    if (site == null)
+      return;
 
-    if ((this.site != null)
-        && (this.site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
-        && (((IStructuredSelection) this.site.getStructuredViewer()
+    if ((site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
+        && (((IStructuredSelection) site.getStructuredViewer()
             .getSelection()).size() == 1)
-        && (((IStructuredSelection) this.site.getStructuredViewer()
-            .getSelection()).getFirstElement() instanceof DfsFile)) {
+        && (((IStructuredSelection) site.getStructuredViewer()
+            .getSelection()).getFirstElement() instanceof DFSFile)) {
+
       actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
-          new DfsAction("dfs.open", "View"));
+          new DFSAction(DFSActions.OPEN));
     }
 
     actionBars.updateActionBars();
@@ -93,75 +83,100 @@
   /* @inheritDoc */
   @Override
   public void fillContextMenu(IMenuManager menu) {
-    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DfsAction(
-        "dfs.delete", "Delete"));
-    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DfsAction(
-        "dfs.refresh", "Refresh"));
+    /*
+     * Actions on multiple selections
+     */
+    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DFSAction(
+        DFSActions.DELETE));
+
+    menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+        DFSActions.REFRESH));
 
-    menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DfsAction(
-        "dfs.get", "Download to local directory..."));
+    menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+        DFSActions.DOWNLOAD));
 
-    if (this.site == null)
+    if (site == null)
       return;
 
-    ISelection isel = this.site.getStructuredViewer().getSelection();
+    ISelection isel = site.getStructuredViewer().getSelection();
     if (!(isel instanceof IStructuredSelection))
       return;
 
+    /*
+     * Actions on single selections only
+     */
+
     IStructuredSelection issel = (IStructuredSelection) isel;
     if (issel.size() != 1)
       return;
-
     Object element = issel.getFirstElement();
 
-    if (element instanceof DfsFile) {
-      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DfsAction(
-          "dfs.open", "View"));
-
-    } else if (element instanceof DfsFolder) {
-      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DfsAction(
-          "dfs.put", "Import from local directory..."));
+    if (element instanceof DFSFile) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.OPEN));
+
+    } else if (element instanceof DFSFolder) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.MKDIR));
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.UPLOAD_FILES));
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
+          DFSActions.UPLOAD_DIR));
+
+    } else if (element instanceof DFSLocation) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.RECONNECT));
+
+    } else if (element instanceof DFSLocationsRoot) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
+          DFSActions.DISCONNECT));
     }
+
   }
 
   /**
-   * 
+   * Representation of an action on a DFS entry in the browser
    */
-  public class DfsAction extends Action {
+  public static class DFSAction extends Action {
 
-    private final String actionDefinition;
+    private final String id;
 
     private final String title;
 
-    public DfsAction(String actionDefinition, String title) {
-      this.actionDefinition = actionDefinition;
+    private DFSActions action;
+
+    public DFSAction(String id, String title) {
+      this.id = id;
       this.title = title;
+    }
 
+    public DFSAction(DFSActions action) {
+      this.id = action.id;
+      this.title = action.title;
     }
 
+    /* @inheritDoc */
     @Override
     public String getText() {
       return this.title;
     }
 
+    /* @inheritDoc */
     @Override
     public ImageDescriptor getImageDescriptor() {
-      if (descriptors.containsKey(getActionDefinitionId())) {
-        return (ImageDescriptor) descriptors.get(getActionDefinitionId());
-      } else {
-        return null;
-      }
+      return ImageLibrary.get(getActionDefinitionId());
     }
 
+    /* @inheritDoc */
     @Override
     public String getActionDefinitionId() {
-      return actionDefinition;
+      return id;
     }
 
+    /* @inheritDoc */
     @Override
     public void run() {
-      org.apache.hadoop.eclipse.actions.DfsAction action =
-          new org.apache.hadoop.eclipse.actions.DfsAction();
+      DFSActionImpl action = new DFSActionImpl();
       action.setActivePart(this, PlatformUI.getWorkbench()
           .getActiveWorkbenchWindow().getActivePage().getActivePart());
       action.selectionChanged(this, site.getStructuredViewer()
@@ -169,6 +184,7 @@
       action.run(this);
     }
 
+    /* @inheritDoc */
     @Override
     public boolean isEnabled() {
       return true;

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java?rev=588310&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSActions.java Thu Oct 25 11:58:32 2007
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+public enum DFSActions {
+
+  DELETE("Delete"), REFRESH("Refresh"), DOWNLOAD("Download from DFS..."), OPEN(
+      "View"), MKDIR("Create new directory..."), UPLOAD_FILES(
+      "Upload files to DFS..."), UPLOAD_DIR("Upload directory to DFS..."), RECONNECT(
+      "Reconnect"), DISCONNECT("Disconnect");
+
+  final String title;
+
+  final String id;
+
+  private static final String PREFIX = "dfs.browser.action.";
+
+  public static DFSActions getById(String def) {
+    if (!def.startsWith(PREFIX))
+      return null;
+    return valueOf(def.substring(PREFIX.length()).toUpperCase());
+  }
+
+  DFSActions(String title) {
+    this.title = title;
+    this.id = PREFIX + this.name().toLowerCase();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java?rev=588310&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java Thu Oct 25 11:58:32 2007
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+/**
+ * Interface to define content entities in the DFS browser
+ */
+public interface DFSContent {
+
+  boolean hasChildren();
+  
+  DFSContent[] getChildren();
+  
+  void refresh();
+  
+}

Modified: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java?rev=588310&r1=588309&r2=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java Thu Oct 25 11:58:32 2007
@@ -18,17 +18,12 @@
 
 package org.apache.hadoop.eclipse.dfs;
 
-import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 
-import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ImageLibrary;
 import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
 import org.apache.hadoop.eclipse.servers.ServerRegistry;
-import org.eclipse.core.resources.ResourcesPlugin;
-import org.eclipse.core.runtime.FileLocator;
-import org.eclipse.core.runtime.IAdaptable;
-import org.eclipse.core.runtime.Path;
-import org.eclipse.jface.resource.ImageDescriptor;
 import org.eclipse.jface.viewers.ILabelProvider;
 import org.eclipse.jface.viewers.ILabelProviderListener;
 import org.eclipse.jface.viewers.ITreeContentProvider;
@@ -36,12 +31,25 @@
 import org.eclipse.jface.viewers.Viewer;
 import org.eclipse.swt.graphics.Image;
 import org.eclipse.swt.widgets.Display;
-import org.eclipse.ui.ISharedImages;
-import org.eclipse.ui.PlatformUI;
-import org.eclipse.ui.model.IWorkbenchAdapter;
 
 /**
- * Handles viewing the DFS
+ * Handles viewing of DFS locations
+ * <p>
+ * 
+ * The content handled by this provider is a tree:
+ * 
+ * <tt>
+ * <br>DFSLocationsRoot
+ * <br>\_HadoopServer
+ * <br>|  \_DfsFolder
+ * <br>|  |  \_DfsFile
+ * <br>|  \_DfsFolder
+ * <br>| ...
+ * <br>\_HadoopServer...
+ * </tt>
+ * 
+ * The code should not block here: blocking operations need to be done
+ * asynchronously so as not to freeze the UI!
  */
 public class DFSContentProvider implements ITreeContentProvider,
     ILabelProvider {
@@ -51,153 +59,186 @@
    */
   private Viewer viewer;
 
-  private ImageDescriptor hadoopImage;
+  private StructuredViewer sviewer;
 
-  private ImageDescriptor folderImage;
+  private Map<HadoopServer, DFSContent> rootFolders =
+      new HashMap<HadoopServer, DFSContent>();
 
-  private ImageDescriptor fileImage;
+  /**
+   * Constructor: load resources (icons).
+   */
+  public DFSContentProvider() {
+  }
 
-  private ImageDescriptor dfsImage;
+  private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this);
 
-  public DFSContentProvider() {
-    try {
-      hadoopImage =
-          ImageDescriptor.createFromURL((FileLocator.toFileURL(FileLocator
-              .find(Activator.getDefault().getBundle(), new Path(
-                  "resources/hadoop_small.gif"), null))));
-      dfsImage =
-          ImageDescriptor.createFromURL((FileLocator.toFileURL(FileLocator
-              .find(Activator.getDefault().getBundle(), new Path(
-                  "resources/files.gif"), null))));
-    } catch (IOException e) {
-      e.printStackTrace();
-      // no images, okay, will deal with that
-    }
+  /*
+   * ITreeContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public Object[] getChildren(Object parent) {
+
+    if (!(parent instanceof DFSContent))
+      return null;
+    DFSContent content = (DFSContent) parent;
+    return content.getChildren();
   }
 
-  public Object[] getChildren(Object parentElement) {
-    if (parentElement instanceof DummyWorkspace) {
-      return ResourcesPlugin.getWorkspace().getRoot().getProjects();
-    }
-    if (parentElement instanceof DFS) {
+  public Object[] test(Object parentElement) {
+    if (parentElement instanceof DFSLocationsRoot) {
       return ServerRegistry.getInstance().getServers().toArray();
+
     } else if (parentElement instanceof HadoopServer) {
-      return new Object[] { new DfsFolder((HadoopServer) parentElement, "/",
-          viewer) };
-    } else if (parentElement instanceof DfsFolder) {
-      return ((DfsFolder) parentElement).getChildren();
+      final HadoopServer location = (HadoopServer) parentElement;
+      Object root = rootFolders.get(location);
+      if (root != null)
+        return new Object[] { root };
+
+      return new Object[] { "Connecting to DFS..." };
+
+    } else if (parentElement instanceof DFSFolder) {
+      DFSFolder folder = (DFSFolder) parentElement;
+      return folder.getChildren();
     }
 
-    return new Object[0];
+    return new Object[] { "<Unknown DFSContent>" };
   }
 
+  /* @inheritDoc */
   public Object getParent(Object element) {
-    if (element instanceof DfsPath) {
-      return ((DfsPath) element).getParent();
+
+    if (element instanceof DFSPath) {
+      return ((DFSPath) element).getParent();
+
     } else if (element instanceof HadoopServer) {
-      return dfs;
-    } else {
-      return null;
+      return locationsRoot;
     }
+
+    return null;
   }
 
+  /* @inheritDoc */
   public boolean hasChildren(Object element) {
-    return (element instanceof HadoopServer)
-        || (element instanceof DfsFolder) || (element instanceof DFS)
-        || (element instanceof DummyWorkspace);
-  }
-
-  public class DFS {
-    public DFS() {
-      ServerRegistry.getInstance().addListener(new IHadoopServerListener() {
-        public void serverChanged(final HadoopServer location, final int type) {
-          if (viewer != null) {
-            Display.getDefault().syncExec(new Runnable() {
-              public void run() {
-                if (type == ServerRegistry.SERVER_STATE_CHANGED) {
-                  ((StructuredViewer) viewer).refresh(location);
-                } else {
-                  ((StructuredViewer) viewer).refresh(ResourcesPlugin
-                      .getWorkspace().getRoot());
-                }
-              }
-            });
-          }
-        }
-      });
-    }
-
-    @Override
-    public String toString() {
-      return "MapReduce DFS";
+    if (element instanceof DFSContent) {
+      DFSContent content = (DFSContent) element;
+      return content.hasChildren();
     }
+    return false;
   }
 
-  private final DFS dfs = new DFS();
-
-  private final Object workspace = new DummyWorkspace();
-
-  private static class DummyWorkspace {
-    @Override
-    public String toString() {
-      return "Workspace";
-    }
-  };
+  /*
+   * IStructureContentProvider implementation
+   */
 
+  /* @inheritDoc */
   public Object[] getElements(final Object inputElement) {
-    return ServerRegistry.getInstance().getServers().toArray();
+    return new Object[] { locationsRoot };
+    // return ServerRegistry.getInstance().getServers().toArray();
   }
 
-  public void dispose() {
+  /*
+   * ILabelProvider implementation
+   */
 
-  }
+  /* @inheritDoc */
+  public Image getImage(Object element) {
+    if (element instanceof DFSLocationsRoot)
+      return ImageLibrary.getImage("dfs.browser.root.entry");
 
-  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
-    this.viewer = viewer;
-  }
+    else if (element instanceof DFSLocation)
+      return ImageLibrary.getImage("dfs.browser.location.entry");
 
-  public Image getImage(Object element) {
-    if (element instanceof DummyWorkspace) {
-      IWorkbenchAdapter a =
-          (IWorkbenchAdapter) ((IAdaptable) ResourcesPlugin.getWorkspace()
-              .getRoot()).getAdapter(IWorkbenchAdapter.class);
-      return a.getImageDescriptor(ResourcesPlugin.getWorkspace().getRoot())
-          .createImage();
-    } else if (element instanceof DFS) {
-      return dfsImage.createImage(true);
-    } else if (element instanceof HadoopServer) {
-      return hadoopImage.createImage(true);
-    } else if (element instanceof DfsFolder) {
-      return PlatformUI.getWorkbench().getSharedImages().getImageDescriptor(
-          ISharedImages.IMG_OBJ_FOLDER).createImage();
-    } else if (element instanceof DfsFile) {
-      return PlatformUI.getWorkbench().getSharedImages().getImageDescriptor(
-          ISharedImages.IMG_OBJ_FILE).createImage();
-    }
+    else if (element instanceof DFSFolder)
+      return ImageLibrary.getImage("dfs.browser.folder.entry");
+
+    else if (element instanceof DFSFile)
+      return ImageLibrary.getImage("dfs.browser.file.entry");
 
     return null;
   }
 
+  /* @inheritDoc */
   public String getText(Object element) {
-    if (element instanceof DummyWorkspace) {
-      IWorkbenchAdapter a =
-          (IWorkbenchAdapter) ((IAdaptable) ResourcesPlugin.getWorkspace()
-              .getRoot()).getAdapter(IWorkbenchAdapter.class);
-      return a.getLabel(ResourcesPlugin.getWorkspace().getRoot());
-    } else {
-      return element.toString();
-    }
+    if (element instanceof DFSFile)
+      return ((DFSFile) element).toDetailedString();
+
+    return element.toString();
   }
 
+  /*
+   * IBaseLabelProvider implementation
+   */
+
+  /* @inheritDoc */
   public void addListener(ILabelProviderListener listener) {
+  }
 
+  /* @inheritDoc */
+  public void removeListener(ILabelProviderListener listener) {
   }
 
+  /* @inheritDoc */
   public boolean isLabelProperty(Object element, String property) {
     return false;
   }
 
-  public void removeListener(ILabelProviderListener listener) {
+  /*
+   * IContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public void dispose() {
+  }
+
+  /* @inheritDoc */
+  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+    this.viewer = viewer;
+    if ((viewer != null) && (viewer instanceof StructuredViewer))
+      this.sviewer = (StructuredViewer) viewer;
+    else
+      this.sviewer = null;
+  }
+
+  /*
+   * Miscellaneous
+   */
+
+  /**
+   * Ask the viewer for this content to refresh
+   */
+  void refresh() {
+    // no display, nothing to update
+    if (this.viewer == null)
+      return;
+
+    Display.getDefault().asyncExec(new Runnable() {
+      public void run() {
+        DFSContentProvider.this.viewer.refresh();
+      }
+    });
+  }
+
+  /**
+   * Ask the viewer to refresh a single element
+   * 
+   * @param content what to refresh
+   */
+  void refresh(final DFSContent content) {
+    if (this.sviewer != null) {
+      Display.getDefault().asyncExec(new Runnable() {
+        public void run() {
+          DFSContentProvider.this.sviewer.refresh(content);
+        }
+      });
 
+    } else {
+      refresh();
+    }
+  }
+
+  Viewer getViewer() {
+    return this.viewer;
   }
+
 }

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFile.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFile.java Thu Oct 25 11:58:32 2007
@@ -20,124 +20,105 @@
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
+import java.io.InputStream;
 import java.lang.reflect.InvocationTargetException;
 
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
 import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.PlatformObject;
+import org.eclipse.core.runtime.Status;
 import org.eclipse.jface.dialogs.MessageDialog;
 import org.eclipse.jface.operation.IRunnableWithProgress;
 import org.eclipse.ui.PlatformUI;
 
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSchException;
-
 /**
  * File handling methods for the DFS
  */
-public class DfsFile extends DfsPath {
+public class DFSFile extends DFSPath implements DFSContent {
 
-  public DfsFile(DfsPath parent, String path) {
-    super(parent, path);
-  }
+  protected long length;
+
+  protected short replication;
 
   /**
-   * Download and view contents of a file in the DFS NOTE: may not work on
-   * files >1 MB.
+   * Constructor to upload a file on the distributed file system
    * 
-   * @return a FileInputStream for the file
+   * @param parent
+   * @param path
+   * @param file
+   * @param monitor
    */
-  public FileInputStream open() throws JSchException, IOException,
-      InvocationTargetException, InterruptedException {
+  public DFSFile(DFSPath parent, Path path, File file,
+      IProgressMonitor monitor) {
 
-    File tempFile =
-        File.createTempFile("hadoop" + System.currentTimeMillis(), "tmp");
-    tempFile.deleteOnExit();
+    super(parent, path);
+    this.upload(monitor, file);
+  }
 
-    this.downloadToLocalFile(tempFile);
+  public DFSFile(DFSPath parent, Path path) {
+    super(parent, path);
 
-    // file size greater than 1 MB
-    if (tempFile.length() > 1024 * 1024) {
-      boolean answer =
-          MessageDialog.openQuestion(null, "Show large file from DFS?",
-              "The file you are attempting to download from the DFS, "
-                  + this.getPath() + " is over 1MB in size. \n"
-                  + "Opening this file may cause performance problems."
-                  + " You can open the file with your favourite editor at "
-                  + tempFile.getAbsolutePath()
-                  + " (it's already saved there)."
-                  + " Continue opening the file in eclipse?");
-      if (!answer) {
-        return null;
-      }
+    try {
+      FileStatus fs = getDFS().getFileStatus(path);
+      this.length = fs.getLen();
+      this.replication = fs.getReplication();
+    } catch (IOException e) {
+      e.printStackTrace();
     }
-
-    return new FileInputStream(tempFile);
   }
 
-  public void downloadToLocalFile(File localFile) throws JSchException,
-      IOException, InvocationTargetException, InterruptedException {
-
-    final ChannelExec exec =
+  /**
+   * Download and view contents of a file
+   * 
+   * @return a InputStream for the file
+   */
+  public InputStream open() throws IOException {
 
-    exec(" dfs " + DfsFolder.s_whichFS + " -cat " + getPath());
+    return getDFS().open(this.path);
+  }
 
-    final OutputStream os =
-        new BufferedOutputStream(new FileOutputStream(localFile));
+  /**
+   * Download this file to the local file system. This creates a download
+   * status monitor.
+   * 
+   * @param file
+   * @throws JSchException
+   * @throws IOException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   * 
+   * @deprecated
+   */
+  public void downloadToLocalFile(final File file)
+      throws InvocationTargetException, InterruptedException {
 
-    try {
-      PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
-          new IRunnableWithProgress() {
-            public void run(IProgressMonitor monitor)
-                throws InvocationTargetException {
-              try {
-                monitor.beginTask("View file from Distributed File System",
-                    IProgressMonitor.UNKNOWN);
-                exec.connect();
-                BufferedInputStream stream =
-                    new BufferedInputStream(exec.getInputStream());
-
-                byte[] buffer = new byte[1024];
-                int bytes;
-
-                while ((bytes = stream.read(buffer)) >= 0) {
-                  if (monitor.isCanceled()) {
-                    os.close();
-                    return;
-                  }
-
-                  monitor.worked(1);
-                  os.write(buffer, 0, bytes);
-                }
-
-                monitor.done();
-              } catch (Exception e) {
-                throw new InvocationTargetException(e);
-              }
-            }
-          });
-    } finally {
-      if (exec.isConnected()) {
-        exec.disconnect();
-      }
-      os.close();
-    }
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException {
+
+            DFSFile.this.downloadToLocalFile(monitor, file);
+          }
+        });
   }
 
   /* @inheritDoc */
   @Override
-  public void downloadToLocalDirectory(String localDirectory)
-      throws InvocationTargetException, JSchException, InterruptedException,
-      IOException {
-
-    File dir = new File(localDirectory);
-    if (!dir.exists() || !dir.isDirectory())
-      return; // TODO display error message
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
 
-    File dfsPath = new File(this.getPath());
+    File dfsPath = new File(this.getPath().toString());
     File destination = new File(dir, dfsPath.getName());
 
     if (destination.exists()) {
@@ -151,7 +132,219 @@
         return;
     }
 
-    this.downloadToLocalFile(destination);
+    try {
+      this.downloadToLocalFile(monitor, destination);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Download to local file system",
+          "Downloading of file \"" + this.path + "\" to local directory \""
+              + dir + "\" has failed.\n" + e);
+    }
+  }
+
+  /**
+   * Provides a detailed string for this file
+   * 
+   * @return the string formatted as
+   *         <tt>&lt;filename&gt; (&lt;size&gt;, r&lt;replication&gt;)</tt>
+   */
+  public String toDetailedString() {
+    final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
+    int unit = 0;
+    double l = this.length;
+    while ((l >= 1024.0) && (unit < units.length)) {
+      unit += 1;
+      l /= 1024.0;
+    }
+
+    return String.format("%s (%.1f %s, r%d)", super.toString(), l,
+        units[unit], this.replication);
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.path.toString();
+  }
+
+  /*
+   * 
+   */
+
+  /**
+   * Download the DfsFile to a local file. Use the given monitor to report
+   * status of operation.
+   * 
+   * @param monitor the status monitor
+   * @param file the local file where to put the downloaded file
+   * @throws InvocationTargetException
+   */
+  public void downloadToLocalFile(IProgressMonitor monitor, File file)
+      throws InvocationTargetException {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Download file " + this.path);
+
+    BufferedOutputStream ostream = null;
+    DataInputStream istream = null;
+
+    try {
+      istream = getDFS().open(this.path);
+      ostream = new BufferedOutputStream(new FileOutputStream(file));
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      throw new InvocationTargetException(e);
+
+    } finally {
+      // Clean all opened resources
+      if (istream != null) {
+        try {
+          istream.close();
+        } catch (IOException e) {
+          e.printStackTrace();
+          // nothing we can do here
+        }
+      }
+      try {
+        ostream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+    }
+  }
+
+  /**
+   * Upload a local file to this file on the distributed file system
+   * 
+   * @param monitor
+   * @param file
+   */
+  public void upload(IProgressMonitor monitor, File file) {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Upload file " + this.path);
+
+    BufferedInputStream istream = null;
+    DataOutputStream ostream = null;
+
+    try {
+      istream = new BufferedInputStream(new FileInputStream(file));
+      ostream = getDFS().create(this.path);
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      ErrorMessageDialog.display(String.format(
+          "Unable to uploade file %s to %s", file, this.path), e
+          .getLocalizedMessage());
+
+    } finally {
+      try {
+        if (istream != null)
+          istream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+      try {
+        if (ostream != null)
+          ostream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    getParent().refresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int computeDownloadWork() {
+    return 1 + (int) (this.length / 1024);
+  }
+
+  /**
+   * Creates an adapter for the file to open it in the Editor
+   * 
+   * @return the IStorage
+   */
+  public IStorage getIStorage() {
+    return new IStorageAdapter();
+  }
+
+  /**
+   * IStorage adapter to open the file in the Editor
+   */
+  private class IStorageAdapter extends PlatformObject implements IStorage {
+
+    /* @inheritDoc */
+    public InputStream getContents() throws CoreException {
+      try {
+        return DFSFile.this.open();
+
+      } catch (IOException ioe) {
+        throw new CoreException(new Status(Status.ERROR,
+            Activator.PLUGIN_ID, "Unable to open file \""
+                + DFSFile.this.path + "\"", ioe));
+      }
+    }
+
+    /* @inheritDoc */
+    public IPath getFullPath() {
+      return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
+    }
+
+    /* @inheritDoc */
+    public String getName() {
+      return DFSFile.this.path.getName();
+    }
+
+    /* @inheritDoc */
+    public boolean isReadOnly() {
+      return true;
+    }
+
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
   }
 
 }

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsFolder.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java Thu Oct 25 11:58:32 2007
@@ -18,307 +18,211 @@
 
 package org.apache.hadoop.eclipse.dfs;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.lang.reflect.InvocationTargetException;
-import java.rmi.dgc.VMID;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.eclipse.Activator;
 import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.fs.Path;
 import org.eclipse.core.runtime.IProgressMonitor;
 import org.eclipse.core.runtime.IStatus;
 import org.eclipse.core.runtime.Status;
-import org.eclipse.core.runtime.SubProgressMonitor;
 import org.eclipse.core.runtime.jobs.Job;
-import org.eclipse.jface.dialogs.ProgressMonitorDialog;
-import org.eclipse.jface.operation.IRunnableWithProgress;
-import org.eclipse.jface.viewers.Viewer;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.ui.PlatformUI;
-
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.SftpException;
+import org.eclipse.jface.dialogs.MessageDialog;
 
 /**
- * Folder handling methods for the DFS
+ * Local representation of a folder in the DFS.
+ * 
+ * The constructor creates an empty representation of the folder and spawn a
+ * thread that will fill
  */
+public class DFSFolder extends DFSPath implements DFSContent {
 
-public class DfsFolder extends DfsPath {
+  static Logger log = Logger.getLogger(DFSFolder.class.getName());
 
-  protected final static String s_whichFS = ""; // "-fs local";
+  private DFSContent[] children;
 
-  static Logger log = Logger.getLogger(DfsFolder.class.getName());
+  protected DFSFolder(DFSContentProvider provider, HadoopServer location)
+      throws IOException {
 
-  private Object[] children;
-
-  private boolean loading = false;
-
-  protected DfsFolder(HadoopServer location, String path, Viewer viewer) {
-    super(location, path, viewer);
+    super(provider, location);
   }
 
-  private DfsFolder(DfsPath parent, String path) {
+  private DFSFolder(DFSPath parent, Path path) {
     super(parent, path);
   }
 
-  public Object[] getChildren() {
-    ChannelExec channel = null;
-    if (children == null) {
-      doRefresh();
-      return new Object[] { "Loading..." };
-    } else {
-      return children;
+  protected void loadDFSFolderChildren() throws IOException {
+    List<DFSPath> list = new ArrayList<DFSPath>();
+
+    for (Path path : getDFS().listPaths(this.getPath())) {
+      if (getDFS().isDirectory(path)) {
+        list.add(new DFSFolder(this, path));
+      } else {
+        list.add(new DFSFile(this, path));
+      }
     }
+
+    this.children = list.toArray(new DFSContent[list.size()]);
   }
 
+  /**
+   * Does a recursive delete of the remote directory tree at this node.
+   */
   @Override
+  public void delete() {
+
+    try {
+      getDFS().delete(this.path);
+
+    } catch (IOException e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Delete file",
+          "Unable to delete file \"" + this.path + "\"\n" + e);
+    }
+  }
+
   /**
-   * Forces a refresh of the items in the current DFS node
+   * Upload the given file or directory into this DfsFolder
+   * 
+   * @param file
+   * @throws IOException
    */
-  public void doRefresh() {
-    new Job("Refresh DFS Children") {
-      @Override
-      protected IStatus run(IProgressMonitor monitor) {
-        try {
-          ChannelExec channel =
-              exec(" dfs " + s_whichFS + " -ls " + getPath());
-          InputStream is = channel.getInputStream();
-          BufferedReader in =
-              new BufferedReader(new InputStreamReader(
-                  new BufferedInputStream(is)));
+  public void upload(IProgressMonitor monitor, final File file)
+      throws IOException {
 
-          if (!channel.isConnected()) {
-            channel.connect();
-          }
+    if (file.isDirectory()) {
+      Path filePath = new Path(this.path, file.getName());
+      getDFS().mkdirs(filePath);
+      DFSFolder newFolder = new DFSFolder(this, filePath);
+      monitor.worked(1);
+      for (File child : file.listFiles()) {
+        if (monitor.isCanceled())
+          return;
+        newFolder.upload(monitor, child);
+      }
 
-          try {
-            // initial "found n items" line ignorable
-            if (in.readLine() == null) {
-              children =
-                  new Object[] { "An error occurred: empty result from dfs -ls" };
-            }
-
-            String line;
-            List<DfsPath> children = new ArrayList<DfsPath>();
-            while ((line = in.readLine()) != null) {
-              String[] parts = line.split("\t");
-
-              for (int i = 0; i < parts.length; i++) {
-                log.fine(parts[0]);
-              }
-
-              if (parts[1].equals("<dir>")) {
-                children.add(new DfsFolder(DfsFolder.this, parts[0]));
-              } else {
-                children.add(new DfsFile(DfsFolder.this, parts[0]));
-              }
-            }
+    } else if (file.isFile()) {
+      Path filePath = new Path(this.path, file.getName());
+      DFSFile newFile = new DFSFile(this, filePath, file, monitor);
 
-            DfsFolder.this.children = children.toArray();
+    } else {
+      // XXX don't know what the file is?
+    }
+  }
 
-            DfsFolder.super.doRefresh();
+  /* @inheritDoc */
+  @Override
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+    if (!dir.exists())
+      dir.mkdirs();
+
+    if (!dir.isDirectory()) {
+      MessageDialog.openError(null, "Download to local file system",
+          "Invalid directory location: \"" + dir + "\"");
+      return;
+    }
 
-            return Status.OK_STATUS;
-          } finally {
-            if (channel.isConnected()) {
-              channel.disconnect();
-            }
-          }
-        } catch (Exception e) {
-          e.printStackTrace();
-          return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
-              "Refreshing DFS node failed: " + e.getLocalizedMessage(), e);
-        }
+    File dfsPath = new File(this.getPath().toString());
+    File destination = new File(dir, dfsPath.getName());
+
+    if (!destination.exists()) {
+      if (!destination.mkdir()) {
+        MessageDialog.openError(null, "Download to local directory",
+            "Unable to create directory " + destination.getAbsolutePath());
+        return;
+      }
+    }
+
+    // Download all DfsPath children
+    for (Object childObj : getChildren()) {
+      if (childObj instanceof DFSPath) {
+        ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
+        monitor.worked(1);
       }
-    }.schedule();
+    }
   }
 
+  /* @inheritDoc */
   @Override
-  /**
-   * Does a recursive delete of the remote directory tree at this node.
-   */
-  public void delete() throws JSchException {
-    doExec("dfs " + s_whichFS + " -rmr " + getPath());
+  public int computeDownloadWork() {
+    int work = 1;
+    for (DFSContent child : getChildren()) {
+      if (child instanceof DFSPath)
+        work += ((DFSPath) child).computeDownloadWork();
+    }
+
+    return work;
   }
 
   /**
-   * Upload a local directory and its contents to the remote DFS
+   * Create a new sub directory into this directory
    * 
-   * @param directory source directory to upload
-   * @throws SftpException
-   * @throws JSchException
-   * @throws InvocationTargetException
-   * @throws InterruptedException
+   * @param folderName
    */
-  public void put(final String directory) throws SftpException,
-      JSchException, InvocationTargetException, InterruptedException {
-    ProgressMonitorDialog progress =
-        new ProgressMonitorDialog((Display.getCurrent() == null) ? null
-            : Display.getCurrent().getActiveShell());
-    progress.setCancelable(true);
-
-    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
-        new IRunnableWithProgress() {
-          public void run(IProgressMonitor monitor)
-              throws InvocationTargetException, InterruptedException {
-            String guid = new VMID().toString().replace(':', '_');
-
-            monitor.beginTask("Secure Copy", 100);
-            scp(directory, "/tmp/hadoop_scp_" + guid,
-                new SubProgressMonitor(monitor, 60));
-
-            try {
-              SubProgressMonitor sub = new SubProgressMonitor(monitor, 1);
-              if (monitor.isCanceled()) {
-                return;
-              }
-
-              final File dir = new File(directory);
-
-              sub.beginTask("Move files from staging server to DFS", 1);
-              ChannelExec exec =
-                  exec(" dfs " + s_whichFS
-                      + " -moveFromLocal /tmp/hadoop_scp_" + guid + " \""
-                      + getPath() + "/" + dir.getName() + "\"");
-              BufferedReader reader =
-                  new BufferedReader(new InputStreamReader(
-                      new BufferedInputStream(exec.getInputStream())));
-
-              if (!monitor.isCanceled()) {
-                exec.connect();
-                String line = reader.readLine();
-                sub.worked(1);
-              }
-
-              if (exec.isConnected()) {
-                exec.disconnect();
-              }
-
-              sub.done();
-
-              monitor.done();
-              doRefresh();
-            } catch (Exception e) {
-              log.log(Level.SEVERE, "", e);
-              throw new InvocationTargetException(e);
-            }
-          }
+  public void mkdir(String folderName) {
+    try {
+      getDFS().mkdirs(new Path(this.path, folderName));
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+    }
+    doRefresh();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    if (this.children == null)
+      return true;
+    else
+      return (this.children.length > 0);
+  }
 
-          public void scp(String from, String to, IProgressMonitor monitor) {
-            File file = new File(from);
-            ChannelExec channel = null;
-
-            monitor.beginTask("scp from " + from + " to " + to, 100 * (file
-                .isDirectory() ? file.list().length + 1 : 1));
-
-            if (monitor.isCanceled()) {
-              return;
-            }
-
-            if (file.isDirectory()) {
-              // mkdir
-              try {
-                channel = (ChannelExec) getSession().openChannel("exec");
-                channel.setCommand(" mkdir " + to);
-                InputStream in = channel.getInputStream();
-                channel.connect();
-                // in.read(); // wait for a response, which
-                // we'll then ignore
-              } catch (JSchException e) {
-                // BUG(jz) abort operation and display error
-                throw new RuntimeException(e);
-              } catch (IOException e) {
-                throw new RuntimeException(e);
-              } finally {
-                if (channel.isConnected()) {
-                  channel.disconnect();
-                }
-              }
-
-              monitor.worked(100);
-
-              String[] children = file.list();
-              for (int i = 0; i < children.length; i++) {
-                File child = new File(file, children[i]);
-
-                // recurse
-                scp(new File(file, children[i]).getAbsolutePath(), to + "/"
-                    + children[i], new SubProgressMonitor(monitor, 100));
-              }
-            } else {
-              InputStream filein = null;
-
-              try {
-                channel = (ChannelExec) getSession().openChannel("exec");
-                (channel).setCommand("scp -p -t " + to);
-                BufferedOutputStream out =
-                    new BufferedOutputStream(channel.getOutputStream());
-                InputStream in = channel.getInputStream();
-                channel.connect();
-
-                if (in.read() == 0) {
-                  int step = (int) (100 / new File(from).length());
-                  out.write(("C0644 " + new File(from).length() + " "
-                      + new File(to).getName() + "\n").getBytes());
-                  out.flush();
-                  if (in.read() != 0) {
-                    throw new RuntimeException("Copy failed");
-                  }
-
-                  filein =
-                      new BufferedInputStream(new FileInputStream(from));
-
-                  byte[] buffer = new byte[1024];
-                  int bytes;
-                  while ((bytes = filein.read(buffer)) > -1) {
-                    if (monitor.isCanceled()) {
-                      return;
-                    }
-
-                    out.write(buffer, 0, bytes);
-                    monitor.worked(step);
-                  }
-
-                  out.write("\0".getBytes());
-                  out.flush();
-
-                  if (in.read() != 0) {
-                    throw new RuntimeException("Copy failed");
-                  }
-                  out.close();
-                } else {
-                  // problems with copy
-                  throw new RuntimeException("Copy failed");
-                }
-              } catch (JSchException e) {
-                e.printStackTrace();
-                throw new RuntimeException(e);
-              } catch (IOException e) {
-                throw new RuntimeException(e);
-              } finally {
-                if (channel.isConnected()) {
-                  channel.disconnect();
-                }
-                try {
-                  filein.close();
-                } catch (IOException e) {
-                }
-              }
-            }
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (children == null) {
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            loadDFSFolderChildren();
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            children =
+                new DFSContent[] { new DFSMessage("Error: "
+                    + ioe.getLocalizedMessage()) };
+            return Status.CANCEL_STATUS;
 
-            monitor.done();
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSFolder.this);
           }
-        });
+        }
+      }.schedule();
+
+      return new DFSContent[] { new DFSMessage("Listing folder content...") };
+    }
+    return this.children;
   }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    this.children = null;
+    this.doRefresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return String.format("%s (%s)", super.toString(),
+        this.getChildren().length);
+  }
+
 }

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java?rev=588310&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java Thu Oct 25 11:58:32 2007
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.IOException;
+
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+
+/**
+ * DFS Content representation of a HDFS location
+ */
+public class DFSLocation implements DFSContent {
+
+  private final DFSContentProvider provider;
+
+  private final HadoopServer location;
+
+  private DFSContent rootFolder = null;
+
+  DFSLocation(DFSContentProvider provider, HadoopServer server) {
+    this.provider = provider;
+    this.location = server;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.location.getLocationName();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (this.rootFolder == null) {
+      /*
+       * DfsFolder constructor might block as it contacts the NameNode: work
+       * asynchronously here or this will potentially freeze the UI
+       */
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            rootFolder = new DFSFolder(provider, location);
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            rootFolder =
+                new DFSMessage("Error: " + ioe.getLocalizedMessage());
+            return Status.CANCEL_STATUS;
+
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSLocation.this);
+          }
+        }
+      }.schedule();
+
+      return new DFSContent[] { new DFSMessage("Connecting to DFS "
+          + toString()) };
+    }
+    return new DFSContent[] { this.rootFolder };
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return true;
+  }
+  
+  /* @inheritDoc */
+  public void refresh() {
+    this.rootFolder = null;
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+  
+  /**
+   * Refresh the location using a new connection
+   */
+  public void reconnect() {
+    this.refresh();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java?rev=588310&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java Thu Oct 25 11:58:32 2007
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.servers.IHadoopServerListener;
+import org.apache.hadoop.eclipse.servers.ServerRegistry;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.ipc.RPC;
+
+/**
+ * Representation of the root element containing all DFS servers. This
+ * content registers an observer on Hadoop servers so as to update itself
+ * when servers are updated.
+ */
+public class DFSLocationsRoot implements DFSContent, IHadoopServerListener {
+
+  /** Content provider this root belongs to; used to push UI refreshes */
+  private final DFSContentProvider provider;
+
+  /** Maps each registered Hadoop server to its DFS location element */
+  private Map<HadoopServer, DFSLocation> map =
+      new HashMap<HadoopServer, DFSLocation>();
+
+  /**
+   * Register a listener to track DFS locations updates
+   * 
+   * @param provider the content provider this content is the root of
+   */
+  DFSLocationsRoot(DFSContentProvider provider) {
+    this.provider = provider;
+    ServerRegistry.getInstance().addListener(this);
+    this.refresh();
+  }
+
+  /*
+   * Implementation of IHadoopServerListener
+   */
+
+  /* @inheritDoc */
+  public synchronized void serverChanged(final HadoopServer location,
+      final int type) {
+
+    switch (type) {
+      case ServerRegistry.SERVER_STATE_CHANGED: {
+        // Only the server state changed: refresh that location alone
+        this.provider.refresh(map.get(location));
+        break;
+      }
+
+      case ServerRegistry.SERVER_ADDED: {
+        DFSLocation dfsLoc = new DFSLocation(provider, location);
+        map.put(location, dfsLoc);
+        this.provider.refresh(this);
+        break;
+      }
+
+      case ServerRegistry.SERVER_REMOVED: {
+        map.remove(location);
+        this.provider.refresh(this);
+        break;
+      }
+    }
+  }
+
+  /**
+   * Recompute the map of Hadoop locations
+   */
+  private synchronized void reloadLocations() {
+    map.clear();
+    for (HadoopServer location : ServerRegistry.getInstance().getServers())
+      map.put(location, new DFSLocation(provider, location));
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return "DFS Locations";
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public synchronized DFSContent[] getChildren() {
+    return this.map.values().toArray(new DFSContent[this.map.size()]);
+  }
+
+  /* @inheritDoc */
+  public synchronized boolean hasChildren() {
+    // Synchronized for a consistent view of the map, which is mutated
+    // by serverChanged() and reloadLocations()
+    return (this.map.size() > 0);
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    reloadLocations();
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+
+  /**
+   * Closes all opened File Systems and stops the RPC client. The close
+   * operation runs in a separate thread and is given at most 5 seconds to
+   * complete, so a hung NameNode connection cannot freeze the caller.
+   */
+  public void disconnect() {
+    Thread closeThread = new Thread() {
+      /* @inheritDoc */
+      @Override
+      public void run() {
+        try {
+          System.out.printf("Closing all opened File Systems...\n");
+          FileSystem.closeAll();
+          System.out.printf("File Systems closed\n");
+
+        } catch (IOException ioe) {
+          ioe.printStackTrace();
+        }
+      }
+    };
+
+    closeThread.start();
+    try {
+      // Wait up to 5 seconds for the File Systems to be closed
+      closeThread.join(5000);
+
+    } catch (InterruptedException ie) {
+      // Restore the interrupt status so callers can observe it
+      Thread.currentThread().interrupt();
+    }
+    RPC.stopClient();
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java?rev=588310&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java Thu Oct 25 11:58:32 2007
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+/**
+ * DFS Content that displays a message.
+ */
+class DFSMessage implements DFSContent {
+
+  // Immutable text displayed for this tree element
+  private final String message;
+
+  /**
+   * Create a message element displaying the given text
+   * 
+   * @param message the text to display
+   */
+  DFSMessage(String message) {
+    this.message = message;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return message;
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    // A message is always a leaf element
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    // Nothing to do: the message text never changes
+  }
+
+}

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DfsPath.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java Thu Oct 25 11:58:32 2007
@@ -15,176 +15,109 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.eclipse.dfs;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
+import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
-import java.lang.reflect.InvocationTargetException;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.dfs.DistributedFileSystem;
+import org.apache.hadoop.eclipse.ErrorMessageDialog;
+import org.apache.hadoop.eclipse.server.ConfProp;
 import org.apache.hadoop.eclipse.server.HadoopServer;
-import org.eclipse.core.runtime.IAdaptable;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.IStatus;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.jface.viewers.StructuredViewer;
-import org.eclipse.jface.viewers.Viewer;
-import org.eclipse.swt.widgets.Display;
-
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
+import org.eclipse.jface.dialogs.MessageDialog;
 
 /**
  * DFS Path handling for DFS
  */
-public class DfsPath implements IAdaptable {
-
-  private Session shell;
+public abstract class DFSPath implements DFSContent {
 
-  private HadoopServer location;
+  protected final DFSContentProvider provider;
 
-  private String path;
+  protected HadoopServer location;
 
-  private final Viewer viewer;
+  private DistributedFileSystem dfs = null;
 
-  private DfsPath parent;
+  protected final Path path;
 
-  static Logger log = Logger.getLogger(DfsPath.class.getName());
+  protected final DFSPath parent;
 
-  public DfsPath(HadoopServer location, String path, Viewer viewer) {
-    this.location = location;
-    this.path = path;
-    this.viewer = viewer;
-  }
-
-  protected String getPath() {
-    return this.path;
-  }
-
-  protected ChannelExec exec(String command) throws JSchException {
-    ChannelExec channel = (ChannelExec) getSession().openChannel("exec");
-    channel.setCommand(location.getInstallPath() + "/bin/hadoop " + command);
-    channel.setErrStream(System.err);
-    // channel.connect();
-
-    return channel;
-  }
+  /**
+   * For debugging purpose
+   */
+  static Logger log = Logger.getLogger(DFSPath.class.getName());
 
-  protected DfsPath(HadoopServer location, String path, Session shell,
-      Viewer viewer) {
-    this(location, path, viewer);
+  /**
+   * Create a path representation for the given location in the given viewer
+   * 
+   * @param location
+   * @param path
+   * @param viewer
+   */
+  public DFSPath(DFSContentProvider provider, HadoopServer location)
+      throws IOException {
 
-    this.shell = shell;
+    this.provider = provider;
+    this.location = location;
+    this.path = new Path("/");
+    this.parent = null;
   }
 
-  protected DfsPath(DfsPath parent, String path) {
-    this(parent.location, path, parent.shell, parent.viewer);
+  /**
+   * Create a sub-path representation for the given parent path
+   * 
+   * @param parent
+   * @param path
+   */
+  protected DFSPath(DFSPath parent, Path path) {
+    this.provider = parent.provider;
+    this.location = parent.location;
+    this.dfs = parent.dfs;
     this.parent = parent;
-  }
-
-  protected Session getSession() throws JSchException {
-    if (shell == null) {
-      // this.shell =
-      // JSchUtilities.createJSch().getSession(location.getUser(),
-      // location.getHostname());
-      this.shell = location.createSession();
-    }
-
-    if (!shell.isConnected()) {
-      shell.connect();
-    }
-
-    return shell;
+    this.path = path;
   }
 
   protected void dispose() {
-    if ((this.shell != null) && this.shell.isConnected()) {
-      shell.disconnect();
-    }
+    // Free the DFS connection
   }
 
+  /* @inheritDoc */
   @Override
   public String toString() {
-    if ((path.length() < 1) || path.equals("/")) {
-      return "DFS @ " + location.getName();
+    // Path.equals(Object) is never true for a String argument, so the
+    // root must be detected by comparing the textual form of the path
+    if (path.toString().equals("/")) {
+      // The root is displayed as the DFS URI of the location
+      return location.getConfProp(ConfProp.FS_DEFAULT_URI);
+
     } else {
-      String[] parts = path.split("/");
-      return parts[parts.length - 1];
+      // Any other path is displayed as its last component
+      return this.path.getName();
     }
   }
 
-  protected void doExec(final String command) {
-    org.eclipse.core.runtime.jobs.Job job =
-        new org.eclipse.core.runtime.jobs.Job("DFS operation: " + command) {
-          @Override
-          protected IStatus run(IProgressMonitor monitor) {
-            ChannelExec exec = null;
-            monitor.beginTask("Execute remote dfs  command", 100);
-            try {
-              exec = exec(" " + command);
-              monitor.worked(33);
-
-              exec.connect();
-              monitor.worked(33);
-
-              BufferedReader reader =
-                  new BufferedReader(new InputStreamReader(
-                      new BufferedInputStream(exec.getInputStream())));
-              String response = reader.readLine(); // TIDY(jz)
-              monitor.worked(34);
-
-              monitor.done();
-
-              refresh();
-
-              return Status.OK_STATUS;
-            } catch (Exception e) {
-              e.printStackTrace();
-              return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
-                  "DFS operation failed: " + e.getLocalizedMessage(), e);
-            } finally {
-              if (exec != null) {
-                exec.disconnect();
-              }
-            }
-          }
-        };
-
-    job.setUser(true);
-    job.schedule();
-  }
-
-  public void delete() throws JSchException {
-    doExec("dfs " + DfsFolder.s_whichFS + " -rm " + path);
+  /**
+   * Delete this path from the DFS. Failures are reported to the user
+   * through a warning dialog.
+   * 
+   * Uses getDFS() rather than reading the dfs field directly, so the
+   * connection is lazily established when this path was created through
+   * the root constructor (where dfs starts out null).
+   */
+  public void delete() {
+    try {
+      getDFS().delete(this.path);
+
+    } catch (IOException e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Delete file",
+          "Unable to delete file \"" + this.path + "\"\n" + e);
+    }
+  }
 
-  public Object getParent() {
+  public DFSPath getParent() {
     return parent;
   }
 
-  public void refresh() {
-    if (parent != null) {
-      parent.doRefresh();
-    } else {
-      doRefresh();
-    }
-  }
-
-  protected void doRefresh() {
-    Display.getDefault().syncExec(new Runnable() {
-      public void run() {
-        ((StructuredViewer) viewer).refresh(DfsPath.this);
-      }
-    });
-  }
+  public abstract void refresh();
 
-  public Object getAdapter(Class type) {
-    log.fine(type.toString());
-    return null;
+  /**
+   * Refresh the UI element for this content
+   */
+  public void doRefresh() {
+    provider.refresh(this);
   }
 
   /**
@@ -192,11 +125,33 @@
    * 
    * @param directory the local directory
    */
-  public void downloadToLocalDirectory(String directory)
-      throws InvocationTargetException, JSchException, InterruptedException,
-      IOException {
+  public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
+      File dir);
+
+  public Path getPath() {
+    return this.path;
+  }
 
-    // Not implemented here; by default, do nothing
+  /**
+   * Gets a connection to the DFS, lazily creating and caching it on
+   * first use
+   * 
+   * @return a connection to the DFS
+   * @throws IOException if the location's file system is not a
+   *           DistributedFileSystem or the connection cannot be created
+   */
+  DistributedFileSystem getDFS() throws IOException {
+    if (this.dfs == null) {
+      FileSystem fs = location.getDFS();
+      // The DFS browser only knows how to handle a DistributedFileSystem;
+      // report any other FileSystem implementation as an error
+      if (!(fs instanceof DistributedFileSystem)) {
+        ErrorMessageDialog.display("DFS Browser",
+            "The DFS Browser cannot browse anything else "
+                + "but a Distributed File System!");
+        throw new IOException("DFS Browser expects a DistributedFileSystem!");
+      }
+      this.dfs = (DistributedFileSystem) fs;
+    }
+    return this.dfs;
+  }
+
+  public abstract int computeDownloadWork();
 
 }

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/LaunchShortcut.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java Thu Oct 25 11:58:32 2007
@@ -18,41 +18,125 @@
 
 package org.apache.hadoop.eclipse.launch;
 
+import java.util.ArrayList;
+import java.util.List;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.eclipse.actions.RunOnHadoopActionDelegate;
+import org.apache.hadoop.eclipse.servers.RunOnHadoopWizard;
+import org.eclipse.core.resources.IFile;
 import org.eclipse.core.resources.IResource;
-import org.eclipse.debug.ui.ILaunchShortcut;
-import org.eclipse.jface.viewers.ISelection;
-import org.eclipse.jface.viewers.IStructuredSelection;
-import org.eclipse.jface.viewers.StructuredSelection;
-import org.eclipse.ui.IEditorPart;
-import org.eclipse.ui.actions.ActionDelegate;
-
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.ILaunchConfigurationType;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.core.IJavaProject;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaCore;
+import org.eclipse.jdt.internal.debug.ui.launcher.JavaApplicationLaunchShortcut;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
+import org.eclipse.jface.wizard.IWizard;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Shell;
 
 /**
  * Add a shortcut "Run on Hadoop" to the Run menu
  */
 
-public class LaunchShortcut implements ILaunchShortcut {
-  static Logger log = Logger.getLogger(LaunchShortcut.class.getName());
+public class HadoopApplicationLaunchShortcut extends
+    JavaApplicationLaunchShortcut {
+
+  static Logger log =
+      Logger.getLogger(HadoopApplicationLaunchShortcut.class.getName());
 
-  private ActionDelegate delegate = new RunOnHadoopActionDelegate();
+  // private ActionDelegate delegate = new RunOnHadoopActionDelegate();
 
-  public LaunchShortcut() {
+  public HadoopApplicationLaunchShortcut() {
   }
 
-  public void launch(final ISelection selection, String mode) {
-    if (selection instanceof IStructuredSelection) {
-      delegate.selectionChanged(null, selection);
-      delegate.run(null);
+  /*
+   * @inheritDoc
+   * 
+   * Finds (or creates) the standard Java launch configuration for the
+   * selected type, then forces a manual run-time classpath and lets the
+   * user pick a Hadoop location through the RunOnHadoopWizard. Returns
+   * null whenever the configuration cannot be tuned or the user cancels
+   * the wizard, which aborts the launch.
+   */
+  @Override
+  protected ILaunchConfiguration findLaunchConfiguration(IType type,
+      ILaunchConfigurationType configType) {
+
+    // Find an existing or create a launch configuration (Standard way)
+    ILaunchConfiguration iConf =
+        super.findLaunchConfiguration(type, configType);
+
+    ILaunchConfigurationWorkingCopy iConfWC;
+    try {
+      /*
+       * Tune the default launch configuration: setup run-time classpath
+       * manually
+       */
+      iConfWC = iConf.getWorkingCopy();
+
+      iConfWC.setAttribute(
+          IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
+
+      List<String> classPath = new ArrayList<String>();
+      IResource resource = type.getResource();
+      IJavaProject project =
+          (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
+      IRuntimeClasspathEntry cpEntry =
+          JavaRuntime.newDefaultProjectClasspathEntry(project);
+      classPath.add(0, cpEntry.getMemento());
+
+      iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+          classPath);
+
+    } catch (CoreException e) {
+      e.printStackTrace();
+      // FIXME Error dialog
+      return null;
+    }
+
+    /*
+     * Update the selected configuration with a specific Hadoop location
+     * target
+     */
+    IResource resource = type.getResource();
+    if (!(resource instanceof IFile))
+      return null;
+    RunOnHadoopWizard wizard =
+        new RunOnHadoopWizard((IFile) resource, iConfWC);
+    WizardDialog dialog =
+        new WizardDialog(Display.getDefault().getActiveShell(), wizard);
+
+    dialog.create();
+    dialog.setBlockOnOpen(true);
+    // A cancelled wizard aborts the launch entirely
+    if (dialog.open() != WizardDialog.OK)
+      return null;
+
+    try {
+      // Persist the wizard's modifications to the launch configuration
+      iConfWC.doSave();
+
+    } catch (CoreException e) {
+      e.printStackTrace();
+      // FIXME Error dialog
+      return null;
+    }
+
+    return iConfWC;
   }
 
-  public void launch(final IEditorPart editor, String mode) {
-    delegate.selectionChanged(null, new StructuredSelection(editor
-        .getEditorInput().getAdapter(IResource.class))); // hmm(jz)
-    // :-)
-    delegate.run(null);
+  /**
+   * Was used to run the RunOnHadoopWizard inside and provide it a
+   * ProgressMonitor. Kept for reference; the shortcut currently opens a
+   * plain WizardDialog instead.
+   */
+  static class Dialog extends WizardDialog {
+    /**
+     * @param parentShell the parent SWT shell
+     * @param newWizard the wizard to host in this dialog
+     */
+    public Dialog(Shell parentShell, IWizard newWizard) {
+      super(parentShell, newWizard);
+    }
+
+    @Override
+    public void create() {
+      super.create();
+
+      // Hand the dialog's progress monitor to the wizard so long-running
+      // wizard operations can report progress
+      ((RunOnHadoopWizard) getWizard())
+          .setProgressMonitor(getProgressMonitor());
+    }
+  }
 }

Copied: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java (from r566883, lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java)
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java?p2=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java&p1=lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java&r1=566883&r2=588310&rev=588310&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/HadoopHomeDirPreferencePage.java (original)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java Thu Oct 25 11:58:32 2007
@@ -25,23 +25,24 @@
 
 /**
  * This class represents a preference page that is contributed to the
- * Preferences dialog. By subclassing <samp>FieldEditorPreferencePage</samp>,
+ * Preferences dialog. By sub-classing <tt>FieldEditorPreferencePage</tt>,
  * we can use the field support built into JFace that allows us to create a
  * page that is small and knows how to save, restore and apply itself.
+ * 
  * <p>
  * This page is used to modify preferences only. They are stored in the
  * preference store that belongs to the main plug-in class. That way,
  * preferences can be accessed directly via the preference store.
  */
 
-public class HadoopHomeDirPreferencePage extends FieldEditorPreferencePage
+public class MapReducePreferencePage extends FieldEditorPreferencePage
     implements IWorkbenchPreferencePage {
 
-  public HadoopHomeDirPreferencePage() {
+  public MapReducePreferencePage() {
     super(GRID);
     setPreferenceStore(Activator.getDefault().getPreferenceStore());
-    setTitle("MapReduce Tools");
-    setDescription("MapReduce Preferences");
+    setTitle("Hadoop Map/Reduce Tools");
+    // setDescription("Hadoop Map/Reduce Preferences");
   }
 
   /**
@@ -52,7 +53,7 @@
   @Override
   public void createFieldEditors() {
     addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH,
-        "&Hadoop main directory:", getFieldEditorParent()));
+        "&Hadoop installation directory:", getFieldEditorParent()));
 
   }
 



Mime
View raw message