hadoop-common-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r566838 [2/4] - in /lucene/hadoop/trunk: ./ src/contrib/ src/contrib/eclipse-plugin/ src/contrib/eclipse-plugin/.settings/ src/contrib/eclipse-plugin/META-INF/ src/contrib/eclipse-plugin/resources/ src/contrib/eclipse-plugin/src/ src/contri...
Date Thu, 16 Aug 2007 20:43:16 GMT
Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/JSchUtilities.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/JSchUtilities.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/JSchUtilities.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/JSchUtilities.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.util.Properties;
+
+import org.eclipse.core.runtime.Platform;
+
+import com.jcraft.jsch.JSch;
+
+/**
+ * Creates a JSch object so that the JSch API can be used to connect to
+ * remote servers via SSH/SCP.
+ */
+
+public class JSchUtilities {
+
+  static String SSH_HOME_DEFAULT = null;
+  static {
+    String ssh_dir_name = ".ssh"; //$NON-NLS-1$
+
+    // Windows doesn't like files or directories starting with a dot.
+    if (Platform.getOS().equals(Platform.OS_WIN32)) {
+      ssh_dir_name = "ssh"; //$NON-NLS-1$
+    }
+
+    SSH_HOME_DEFAULT = System.getProperty("user.home"); //$NON-NLS-1$
+    if (SSH_HOME_DEFAULT != null) {
+      SSH_HOME_DEFAULT = SSH_HOME_DEFAULT + java.io.File.separator
+          + ssh_dir_name;
+    }
+  }
+
+  public static synchronized JSch createJSch() {
+
+    // IPreferenceStore store = CVSSSH2Plugin.getDefault().getPreferenceStore();
+    // String ssh_home = store.getString(SSH_HOME_DEFAULT);
+    String ssh_home = SSH_HOME_DEFAULT;
+
+    Properties props = new Properties();
+    props.setProperty("StrictHostKeyChecking", "no");
+
+    JSch jsch = new JSch();
+    JSch.setConfig(props);
+    /*
+     * JSch.setLogger(new Logger() { public boolean isEnabled(int level) {
+     * return true; }
+     * 
+     * public void log(int level, String message) { System.out.println("JSCH
+     * Level " + level + ": " + message); } });
+     */
+
+    try {
+      java.io.File file;
+      file = new java.io.File(ssh_home, "known_hosts"); //$NON-NLS-1$
+      jsch.setKnownHosts(file.getPath());
+    } catch (Exception e) {
+      // Ignore: a missing known_hosts file is tolerable here since strict
+      // host key checking is disabled above.
+    }
+
+    return jsch;
+  }
+}
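
For context, a minimal usage sketch of the factory above: the host name, user name, and password below are hypothetical placeholders, and the example assumes it lives in the same package as JSchUtilities.

    import com.jcraft.jsch.JSch;
    import com.jcraft.jsch.Session;

    public class JSchUtilitiesExample {
      public static void main(String[] args) throws Exception {
        JSch jsch = JSchUtilities.createJSch();
        // "hadoop", "master.example.com" and the password are placeholders.
        Session session = jsch.getSession("hadoop", "master.example.com", 22);
        session.setPassword("secret");
        // Host key checking was disabled by createJSch(), so no known_hosts
        // entry is required to connect.
        session.connect();
        try {
          // ... open exec/sftp channels here ...
        } finally {
          session.disconnect();
        }
      }
    }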

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.File;
+import java.io.FileFilter;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.eclipse.core.resources.IProject;
+import org.eclipse.core.resources.IProjectNature;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.NullProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.core.runtime.QualifiedName;
+import org.eclipse.jdt.core.IClasspathEntry;
+import org.eclipse.jdt.core.IJavaProject;
+import org.eclipse.jdt.core.JavaCore;
+
+/**
+ * Class to configure and deconfigure an Eclipse project with the MapReduce
+ * project nature.
+ */
+
+public class MapReduceNature implements IProjectNature {
+
+  public static final String ID = "org.apache.hadoop.eclipse.Nature";
+
+  private IProject project;
+
+  static Logger log = Logger.getLogger(MapReduceNature.class.getName());
+
+  /**
+   * Configures an Eclipse project as a MapReduce project by adding the
+   * Hadoop libraries to a project's classpath.
+   */
+  public void configure() throws CoreException {
+    String path =
+        project.getPersistentProperty(new QualifiedName(Activator.PLUGIN_ID,
+            "hadoop.runtime.path"));
+
+    File dir = new File(path);
+    final ArrayList<File> coreJars = new ArrayList<File>();
+    dir.listFiles(new FileFilter() {
+      public boolean accept(File pathname) {
+        String fileName = pathname.getName();
+
+        // get the hadoop core jar without touching test or examples;
+        // older versions of hadoop don't use the word "core" -- eyhung
+        if ((fileName.indexOf("hadoop") != -1) && (fileName.endsWith("jar"))
+            && (fileName.indexOf("test") == -1)
+            && (fileName.indexOf("examples") == -1)) {
+          coreJars.add(pathname);
+        }
+
+        return false; // we don't care what this returns
+      }
+    });
+    File dir2 = new File(path + File.separatorChar + "lib");
+    if (dir2.exists() && dir2.isDirectory()) {
+      dir2.listFiles(new FileFilter() {
+        public boolean accept(File pathname) {
+          if ((!pathname.isDirectory())
+              && (pathname.getName().endsWith("jar"))) {
+            coreJars.add(pathname);
+          }
+
+          return false; // we don't care what this returns
+        }
+      });
+    }
+
+    // Add Hadoop libraries onto classpath
+    IJavaProject javaProject = JavaCore.create(getProject());
+    // Bundle bundle = Activator.getDefault().getBundle();
+    try {
+      IClasspathEntry[] currentCp = javaProject.getRawClasspath();
+      IClasspathEntry[] newCp =
+          new IClasspathEntry[currentCp.length + coreJars.size()];
+      System.arraycopy(currentCp, 0, newCp, 0, currentCp.length);
+
+      final Iterator<File> i = coreJars.iterator();
+      int count = 0;
+      while (i.hasNext()) {
+        // for (int i = 0; i < s_coreJarNames.length; i++) {
+
+        final File f = i.next();
+        // URL url = FileLocator.toFileURL(FileLocator.find(bundle, new
+        // Path("lib/" + s_coreJarNames[i]), null));
+        URL url = f.toURL();
+        log.finer("hadoop library url.getPath() = " + url.getPath());
+
+        newCp[newCp.length - 1 - count] =
+            JavaCore.newLibraryEntry(new Path(url.getPath()), null, null);
+        count++;
+      }
+
+      javaProject.setRawClasspath(newCp, new NullProgressMonitor());
+    } catch (Exception e) {
+      log.log(Level.SEVERE,
+          "Exception generated in MapReduceNature.configure()", e);
+    }
+  }
+
+  /**
+   * Deconfigure a project from MapReduce status. Currently unimplemented.
+   */
+  public void deconfigure() throws CoreException {
+    // TODO Auto-generated method stub
+
+  }
+
+  /**
+   * Returns the project to which this project nature applies.
+   */
+  public IProject getProject() {
+    return this.project;
+  }
+
+  /**
+   * Sets the project to which this nature applies. Used when instantiating
+   * this project nature at runtime.
+   */
+  public void setProject(IProject project) {
+    this.project = project;
+  }
+}
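
For context, attaching this nature to a project is what triggers configure(); a minimal sketch using the standard IProjectDescription API, mirroring the performFinish() code in NewMapReduceProjectWizard later in this commit (same package as MapReduceNature assumed):

    import org.eclipse.core.resources.IProject;
    import org.eclipse.core.resources.IProjectDescription;
    import org.eclipse.core.runtime.CoreException;
    import org.eclipse.core.runtime.NullProgressMonitor;

    public class AddNatureSketch {
      /** Prepends the MapReduce nature to an existing, open project. */
      static void addMapReduceNature(IProject project) throws CoreException {
        IProjectDescription description = project.getDescription();
        String[] existing = description.getNatureIds();
        String[] natures = new String[existing.length + 1];
        natures[0] = MapReduceNature.ID; // causes configure() to run
        System.arraycopy(existing, 0, natures, 1, existing.length);
        description.setNatureIds(natures);
        project.setDescription(description, new NullProgressMonitor());
      }
    }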

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Driver class (a class that runs a MapReduce job).
+ * 
+ */
+
+public class NewDriverWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private NewDriverWizardPage page;
+
+  /*
+   * @Override public boolean performFinish() { }
+   */
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  public NewDriverWizard() {
+    setWindowTitle("New MapReduce Driver");
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new NewDriverWizardPage();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  /**
+   * Performs any actions appropriate in response to the user having pressed
+   * the Finish button, or refuses if finishing now is not permitted.
+   */
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /**
+   * Creates the new driver type when the wizard finishes.
+   */
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return page.getCreatedType().getPrimaryElement();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizardPage.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,270 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaModelException;
+import org.eclipse.jdt.core.search.SearchEngine;
+import org.eclipse.jdt.ui.IJavaElementSearchConstants;
+import org.eclipse.jdt.ui.JavaUI;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.dialogs.ProgressMonitorDialog;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.dialogs.SelectionDialog;
+
+/**
+ * Pre-fills the new MapReduce driver class with a template.
+ * 
+ */
+
+public class NewDriverWizardPage extends NewTypeWizardPage {
+  private Button isCreateMapMethod;
+
+  private Text reducerText;
+
+  private Text mapperText;
+
+  private final boolean showContainerSelector;
+
+  public NewDriverWizardPage() {
+    this(true);
+  }
+
+  public NewDriverWizardPage(boolean showContainerSelector) {
+    super(true, "MapReduce Driver");
+
+    this.showContainerSelector = showContainerSelector;
+    setTitle("MapReduce Driver");
+    setDescription("Create a new MapReduce driver.");
+    try {
+      setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+          .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+              new Path("resources/driverwiz.png"), null)))));
+    } catch (IOException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  public void setSelection(IStructuredSelection selection) {
+    initContainerPage(getInitialJavaElement(selection));
+    initTypePage(getInitialJavaElement(selection));
+  }
+
+  /**
+   * Creates the new type using the entered field values.
+   */
+  @Override
+  public void createType(IProgressMonitor monitor) throws CoreException,
+      InterruptedException {
+    super.createType(monitor);
+  }
+
+  @Override
+  protected void createTypeMembers(final IType newType, ImportsManager imports,
+      final IProgressMonitor monitor) throws CoreException {
+    super.createTypeMembers(newType, imports, monitor);
+    imports.addImport("org.apache.hadoop.fs.Path");
+    imports.addImport("org.apache.hadoop.io.Text");
+    imports.addImport("org.apache.hadoop.io.IntWritable");
+    imports.addImport("org.apache.hadoop.mapred.JobClient");
+    imports.addImport("org.apache.hadoop.mapred.JobConf");
+    imports.addImport("org.apache.hadoop.mapred.Reducer");
+    imports.addImport("org.apache.hadoop.mapred.Mapper");
+
+    /**
+     * TODO(jz) - move most code out of the runnable
+     */
+    getContainer().getShell().getDisplay().syncExec(new Runnable() {
+      public void run() {
+
+        String method = "public static void main(String[] args) {\n JobClient client = new JobClient();";
+        method += "JobConf conf = new JobConf("
+            + newType.getFullyQualifiedName() + ".class);\n\n";
+
+        method += "// TODO: specify output types\nconf.setOutputKeyClass(Text.class);\nconf.setOutputValueClass(IntWritable.class);\n\n";
+
+        method += "// TODO: specify input and output DIRECTORIES (not files)\nconf.setInputPath(new Path(\"src\"));\nconf.setOutputPath(new Path(\"out\"));\n\n";
+
+        if (mapperText.getText().length() > 0) {
+          method += "conf.setMapperClass(" + mapperText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a mapper\nconf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);\n\n";
+        }
+        if (reducerText.getText().length() > 0) {
+          method += "conf.setReducerClass(" + reducerText.getText()
+              + ".class);\n\n";
+        } else {
+          method += "// TODO: specify a reducer\nconf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);\n\n";
+        }
+
+        method += "client.setConf(conf);\n";
+        method += "try {\n\tJobClient.runJob(conf);\n} catch (Exception e) {\n"
+            + "\te.printStackTrace();\n}\n";
+        method += "}\n";
+
+        try {
+          newType.createMethod(method, null, false, monitor);
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+  }
+
+  public void createControl(Composite parent) {
+    // super.createControl(parent);
+
+    initializeDialogUnits(parent);
+    Composite composite = new Composite(parent, SWT.NONE);
+    GridLayout layout = new GridLayout();
+    layout.numColumns = 4;
+    composite.setLayout(layout);
+
+    createContainerControls(composite, 4);
+
+    createPackageControls(composite, 4);
+    createSeparator(composite, 4);
+    createTypeNameControls(composite, 4);
+
+    createSuperClassControls(composite, 4);
+    createSuperInterfacesControls(composite, 4);
+    createSeparator(composite, 4);
+
+    createMapperControls(composite);
+    createReducerControls(composite);
+
+    if (!showContainerSelector) {
+      setPackageFragmentRoot(null, false);
+      setSuperClass("java.lang.Object", false);
+      setSuperInterfaces(new ArrayList(), false);
+    }
+
+    setControl(composite);
+
+    setFocus();
+    handleFieldChanged(CONTAINER);
+
+    // setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+    // setSuperInterfaces(Arrays.asList(new String[]{
+    // "org.apache.hadoop.mapred.Mapper" }), true);
+  }
+
+  @Override
+  protected void handleFieldChanged(String fieldName) {
+    super.handleFieldChanged(fieldName);
+
+    validate();
+  }
+
+  private void validate() {
+    if (showContainerSelector) {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    } else {
+      updateStatus(new IStatus[] { fTypeNameStatus, });
+    }
+  }
+
+  private void createMapperControls(Composite composite) {
+    this.mapperText = createBrowseClassControl(composite, "Ma&pper:",
+        "&Browse...", "org.apache.hadoop.mapred.Mapper", "Mapper Selection");
+  }
+
+  private void createReducerControls(Composite composite) {
+    this.reducerText = createBrowseClassControl(composite, "&Reducer:",
+        "Browse&...", "org.apache.hadoop.mapred.Reducer", "Reducer Selection");
+  }
+
+  private Text createBrowseClassControl(final Composite composite,
+      final String string, String browseButtonLabel,
+      final String baseClassName, final String dialogTitle) {
+    Label label = new Label(composite, SWT.NONE);
+    GridData data = new GridData(GridData.FILL_HORIZONTAL);
+    label.setText(string);
+    label.setLayoutData(data);
+
+    final Text text = new Text(composite, SWT.SINGLE | SWT.BORDER);
+    GridData data2 = new GridData(GridData.FILL_HORIZONTAL);
+    data2.horizontalSpan = 2;
+    text.setLayoutData(data2);
+
+    Button browse = new Button(composite, SWT.NONE);
+    browse.setText(browseButtonLabel);
+    GridData data3 = new GridData(GridData.FILL_HORIZONTAL);
+    browse.setLayoutData(data3);
+    browse.addListener(SWT.Selection, new Listener() {
+      public void handleEvent(Event event) {
+        IType baseType;
+        try {
+          baseType = getPackageFragmentRoot().getJavaProject().findType(
+              baseClassName);
+
+          // edit this to limit the scope
+          SelectionDialog dialog = JavaUI.createTypeDialog(
+              composite.getShell(), new ProgressMonitorDialog(composite
+                  .getShell()), SearchEngine.createHierarchyScope(baseType),
+              IJavaElementSearchConstants.CONSIDER_CLASSES, false);
+
+          dialog.setMessage("&Choose a type:");
+          dialog.setBlockOnOpen(true);
+          dialog.setTitle(dialogTitle);
+          dialog.open();
+
+          if ((dialog.getReturnCode() == Window.OK)
+              && (dialog.getResult().length > 0)) {
+            IType type = (IType) dialog.getResult()[0];
+            text.setText(type.getFullyQualifiedName());
+          }
+        } catch (JavaModelException e) {
+          // TODO Auto-generated catch block
+          e.printStackTrace();
+        }
+      }
+    });
+
+    if (!showContainerSelector) {
+      label.setEnabled(false);
+      text.setEnabled(false);
+      browse.setEnabled(false);
+    }
+
+    return text;
+  }
+}
\ No newline at end of file
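
For reference, with both the Mapper and Reducer fields left empty, the string-built template in createTypeMembers() above expands to a main method along these lines (MyDriver stands in for the user-chosen type name; indentation is what the editor's formatter would produce):

    public static void main(String[] args) {
      JobClient client = new JobClient();
      JobConf conf = new JobConf(MyDriver.class);

      // TODO: specify output types
      conf.setOutputKeyClass(Text.class);
      conf.setOutputValueClass(IntWritable.class);

      // TODO: specify input and output DIRECTORIES (not files)
      conf.setInputPath(new Path("src"));
      conf.setOutputPath(new Path("out"));

      // TODO: specify a mapper
      conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);

      // TODO: specify a reducer
      conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

      client.setConf(conf);
      try {
        JobClient.runJob(conf);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }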

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapReduceProjectWizard.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.lang.reflect.InvocationTargetException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.preferences.HadoopHomeDirPreferencePage;
+import org.apache.hadoop.eclipse.preferences.PreferenceConstants;
+import org.eclipse.core.resources.IProject;
+import org.eclipse.core.resources.IProjectDescription;
+import org.eclipse.core.resources.ResourcesPlugin;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IConfigurationElement;
+import org.eclipse.core.runtime.IExecutableExtension;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.NullProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.core.runtime.QualifiedName;
+import org.eclipse.core.runtime.SubProgressMonitor;
+import org.eclipse.jdt.ui.wizards.NewJavaProjectWizardPage;
+import org.eclipse.jface.dialogs.IDialogConstants;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.preference.PreferenceDialog;
+import org.eclipse.jface.preference.PreferenceManager;
+import org.eclipse.jface.preference.PreferenceNode;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.wizard.IWizardPage;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.DirectoryDialog;
+import org.eclipse.swt.widgets.Group;
+import org.eclipse.swt.widgets.Link;
+import org.eclipse.swt.widgets.Text;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.IWorkbenchWizard;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.dialogs.WizardNewProjectCreationPage;
+import org.eclipse.ui.wizards.newresource.BasicNewProjectResourceWizard;
+
+/**
+ * Wizard for creating a new MapReduce Project
+ * 
+ */
+
+public class NewMapReduceProjectWizard extends Wizard implements
+    IWorkbenchWizard, IExecutableExtension {
+  static Logger log = Logger.getLogger(NewMapReduceProjectWizard.class
+      .getName());
+
+  private HadoopFirstPage firstPage;
+
+  private NewJavaProjectWizardPage javaPage;
+
+  public NewDriverWizardPage newDriverPage;
+
+  private IConfigurationElement config;
+
+  public NewMapReduceProjectWizard() {
+    setWindowTitle("New MapReduce Project Wizard");
+  }
+
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+
+  }
+
+  @Override
+  public boolean canFinish() {
+    return firstPage.isPageComplete() && javaPage.isPageComplete()
+    // && ((!firstPage.generateDriver.getSelection())
+    // || newDriverPage.isPageComplete()
+    ;
+  }
+
+  @Override
+  public IWizardPage getNextPage(IWizardPage page) {
+    // if (page == firstPage
+    // && firstPage.generateDriver.getSelection()
+    // )
+    // {
+    // return newDriverPage; // if "generate mapper" checked, second page is
+    // new driver page
+    // }
+    // else
+    // {
+    IWizardPage answer = super.getNextPage(page);
+    if (answer == newDriverPage) {
+      // don't flip to the new driver page unless "generate driver" is checked
+      return null;
+    }
+    return answer;
+    // }
+  }
+
+  @Override
+  public IWizardPage getPreviousPage(IWizardPage page) {
+    if (page == newDriverPage) {
+      return firstPage; // newDriverPage, if it appears, is the second page
+    } else {
+      return super.getPreviousPage(page);
+    }
+  }
+
+  static class HadoopFirstPage extends WizardNewProjectCreationPage implements
+      SelectionListener {
+    public HadoopFirstPage() {
+      super("New Hadoop Project");
+
+      try {
+        setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+            .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+                new Path("resources/projwiz.png"), null)))));
+      } catch (Exception e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+      }
+    }
+
+    private Link openPreferences;
+
+    private Button workspaceHadoop;
+
+    private Button projectHadoop;
+
+    private Text location;
+
+    private Button browse;
+
+    private String path;
+
+    public String currentPath;
+
+    private Button generateDriver;
+
+    @Override
+    public void createControl(Composite parent) {
+      super.createControl(parent);
+
+      setTitle("MapReduce Project");
+      setDescription("Create a MapReduce project.");
+
+      Group group = new Group((Composite) getControl(), SWT.NONE);
+      group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+      group.setText("Hadoop MapReduce Library Installation Path");
+      GridLayout layout = new GridLayout(3, true);
+      layout.marginLeft = convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
+      layout.marginRight = convertHorizontalDLUsToPixels(IDialogConstants.HORIZONTAL_MARGIN);
+      layout.marginTop = convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
+      layout.marginBottom = convertHorizontalDLUsToPixels(IDialogConstants.VERTICAL_MARGIN);
+      group.setLayout(layout);
+
+      workspaceHadoop = new Button(group, SWT.RADIO);
+      GridData d = new GridData(GridData.BEGINNING, GridData.BEGINNING, false,
+          false);
+      d.horizontalSpan = 2;
+      workspaceHadoop.setLayoutData(d);
+      // workspaceHadoop.setText("Use default workbench Hadoop library
+      // location");
+      workspaceHadoop.setSelection(true);
+
+      updateHadoopDirLabelFromPreferences();
+
+      openPreferences = new Link(group, SWT.NONE);
+      openPreferences.setText("<a>Configure Hadoop install directory...</a>");
+      openPreferences.setLayoutData(new GridData(GridData.END, GridData.CENTER,
+          false, false));
+      openPreferences.addSelectionListener(this);
+
+      projectHadoop = new Button(group, SWT.RADIO);
+      projectHadoop.setLayoutData(new GridData(GridData.BEGINNING,
+          GridData.CENTER, false, false));
+      projectHadoop.setText("Specify Hadoop library location");
+
+      location = new Text(group, SWT.SINGLE | SWT.BORDER);
+      location.setText("");
+      d = new GridData(GridData.END, GridData.CENTER, true, false);
+      d.horizontalSpan = 1;
+      d.widthHint = 250;
+      d.grabExcessHorizontalSpace = true;
+      location.setLayoutData(d);
+      location.setEnabled(false);
+
+      browse = new Button(group, SWT.NONE);
+      browse.setText("Browse...");
+      browse.setLayoutData(new GridData(GridData.BEGINNING, GridData.CENTER,
+          false, false));
+      browse.setEnabled(false);
+      browse.addSelectionListener(this);
+
+      projectHadoop.addSelectionListener(this);
+      workspaceHadoop.addSelectionListener(this);
+
+      // generateDriver = new Button((Composite) getControl(), SWT.CHECK);
+      // generateDriver.setText("Generate a MapReduce driver");
+      // generateDriver.addListener(SWT.Selection, new Listener()
+      // {
+      // public void handleEvent(Event event) {
+      // getContainer().updateButtons(); }
+      // });
+    }
+
+    @Override
+    public boolean isPageComplete() {
+      boolean validHadoop = validateHadoopLocation();
+
+      if (!validHadoop && isCurrentPage()) {
+        setErrorMessage("Invalid Hadoop Runtime specified; please click 'Configure Hadoop install directory' or fill in library location input field");
+      } else {
+        setErrorMessage(null);
+      }
+
+      return super.isPageComplete() && validHadoop;
+    }
+
+    private boolean validateHadoopLocation() {
+      FilenameFilter gotHadoopJar = new FilenameFilter() {
+        public boolean accept(File dir, String name) {
+          return (name.startsWith("hadoop") && name.endsWith(".jar")
+              && (name.indexOf("test") == -1) && (name.indexOf("examples") == -1));
+        }
+      };
+
+      if (workspaceHadoop.getSelection()) {
+        this.currentPath = path;
+      } else {
+        this.currentPath = location.getText();
+      }
+
+      // File.list() returns null if the path is not an existing directory
+      File dir = new Path(this.currentPath).toFile();
+      String[] jars = dir.isDirectory() ? dir.list(gotHadoopJar) : null;
+      return (jars != null) && (jars.length > 0);
+    }
+
+    private void updateHadoopDirLabelFromPreferences() {
+      path = Activator.getDefault().getPreferenceStore().getString(
+          PreferenceConstants.P_PATH);
+
+      if ((path != null) && (path.length() > 0)) {
+        workspaceHadoop.setText("Use default Hadoop");
+      } else {
+        workspaceHadoop.setText("Use default Hadoop (currently not set)");
+      }
+    }
+
+    public void widgetDefaultSelected(SelectionEvent e) {
+    }
+
+    public void widgetSelected(SelectionEvent e) {
+      if (e.getSource() == openPreferences) {
+        PreferenceManager manager = new PreferenceManager();
+        manager.addToRoot(new PreferenceNode("Hadoop Installation Directory",
+            new HadoopHomeDirPreferencePage()));
+        PreferenceDialog dialog = new PreferenceDialog(this.getShell(), manager);
+        dialog.create();
+        dialog.setMessage("Select Hadoop Installation Directory");
+        dialog.setBlockOnOpen(true);
+        dialog.open();
+
+        updateHadoopDirLabelFromPreferences();
+      } else if (e.getSource() == browse) {
+        DirectoryDialog dialog = new DirectoryDialog(this.getShell());
+        dialog
+            .setMessage("Select a hadoop installation, containing hadoop-X-core.jar");
+        dialog.setText("Select Hadoop Installation Directory");
+        String directory = dialog.open();
+
+        if (directory != null) {
+          location.setText(directory);
+
+          if (!validateHadoopLocation()) {
+            setErrorMessage("No Hadoop jar found in specified directory");
+          } else {
+            setErrorMessage(null);
+          }
+        }
+      } else if (projectHadoop.getSelection()) {
+        location.setEnabled(true);
+        browse.setEnabled(true);
+      } else {
+        location.setEnabled(false);
+        browse.setEnabled(false);
+      }
+
+      getContainer().updateButtons();
+    }
+  }
+
+  @Override
+  public void addPages() {
+    /*
+     * firstPage = new HadoopFirstPage(); addPage(firstPage ); addPage( new
+     * JavaProjectWizardSecondPage(firstPage) );
+     */
+
+    firstPage = new HadoopFirstPage();
+    javaPage = new NewJavaProjectWizardPage(ResourcesPlugin.getWorkspace()
+        .getRoot(), firstPage);
+    // newDriverPage = new NewDriverWizardPage(false);
+    // newDriverPage.setPageComplete(false); // ensure finish button
+    // initially disabled
+    addPage(firstPage);
+    addPage(javaPage);
+
+    // addPage(newDriverPage);
+  }
+
+  @Override
+  public boolean performFinish() {
+    try {
+      PlatformUI.getWorkbench().getProgressService().runInUI(
+          this.getContainer(), new IRunnableWithProgress() {
+            public void run(IProgressMonitor monitor) {
+              try {
+                monitor.beginTask("Create Hadoop Project", 300);
+
+                javaPage.getRunnable()
+                    .run(new SubProgressMonitor(monitor, 100));
+
+                // if( firstPage.generateDriver.getSelection())
+                // {
+                // newDriverPage.setPackageFragmentRoot(javaPage.getNewJavaProject().getAllPackageFragmentRoots()[0],
+                // false);
+                // newDriverPage.getRunnable().run(new
+                // SubProgressMonitor(monitor,100));
+                // }
+
+                IProject project = javaPage.getNewJavaProject().getResource()
+                    .getProject();
+                IProjectDescription description = project.getDescription();
+                String[] existingNatures = description.getNatureIds();
+                String[] natures = new String[existingNatures.length + 1];
+                for (int i = 0; i < existingNatures.length; i++) {
+                  natures[i + 1] = existingNatures[i];
+                }
+
+                natures[0] = MapReduceNature.ID;
+                description.setNatureIds(natures);
+
+                project.setPersistentProperty(new QualifiedName(
+                    Activator.PLUGIN_ID, "hadoop.runtime.path"),
+                    firstPage.currentPath);
+                project.setDescription(description, new NullProgressMonitor());
+
+                String[] natureIds = project.getDescription().getNatureIds();
+                for (int i = 0; i < natureIds.length; i++) {
+                  log.fine("Nature id # " + i + " > " + natureIds[i]);
+                }
+
+                monitor.worked(100);
+                monitor.done();
+
+                BasicNewProjectResourceWizard.updatePerspective(config);
+              } catch (CoreException e) {
+                // TODO Auto-generated catch block
+                log.log(Level.SEVERE, "CoreException thrown.", e);
+              } catch (InvocationTargetException e) {
+                // TODO Auto-generated catch block
+                e.printStackTrace();
+              } catch (InterruptedException e) {
+                // TODO Auto-generated catch block
+                e.printStackTrace();
+              }
+            }
+          }, null);
+    } catch (InvocationTargetException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+
+    return true;
+  }
+
+  public void setInitializationData(IConfigurationElement config,
+      String propertyName, Object data) throws CoreException {
+    this.config = config;
+  }
+}
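
Other plugin code can later detect projects created this way; a small sketch against the standard IProject API (same package as MapReduceNature assumed):

    import org.eclipse.core.resources.IProject;
    import org.eclipse.core.runtime.CoreException;

    public class NatureCheckSketch {
      /** Returns true if the wizard above installed the MapReduce nature. */
      static boolean isMapReduceProject(IProject project) throws CoreException {
        return project.isOpen() && project.hasNature(MapReduceNature.ID);
      }
    }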

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Mapper class (a class that runs the Map portion of
+ * a MapReduce job). The class is pre-filled with a template.
+ * 
+ */
+
+public class NewMapperWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private Page page;
+
+  public NewMapperWizard() {
+    setWindowTitle("New Mapper");
+  }
+
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new Page();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  public static class Page extends NewTypeWizardPage {
+    private Button isCreateMapMethod;
+
+    public Page() {
+      super(true, "Mapper");
+
+      setTitle("Mapper");
+      setDescription("Create a new Mapper implementation.");
+      try {
+        setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+            .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+                new Path("resources/mapwiz.png"), null)))));
+      } catch (IOException e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+      }
+
+    }
+
+    public void setSelection(IStructuredSelection selection) {
+      initContainerPage(getInitialJavaElement(selection));
+      initTypePage(getInitialJavaElement(selection));
+    }
+
+    @Override
+    public void createType(IProgressMonitor monitor) throws CoreException,
+        InterruptedException {
+      super.createType(monitor);
+    }
+
+    @Override
+    protected void createTypeMembers(IType newType, ImportsManager imports,
+        IProgressMonitor monitor) throws CoreException {
+      super.createTypeMembers(newType, imports, monitor);
+      imports.addImport("java.io.IOException");
+      imports.addImport("org.apache.hadoop.io.WritableComparable");
+      imports.addImport("org.apache.hadoop.io.Writable");
+      imports.addImport("org.apache.hadoop.mapred.OutputCollector");
+      imports.addImport("org.apache.hadoop.mapred.Reporter");
+      newType
+          .createMethod(
+              "public void map(WritableComparable key, Writable values, OutputCollector output, Reporter reporter) throws IOException \n{\n}\n",
+              null, false, monitor);
+    }
+
+    public void createControl(Composite parent) {
+      // super.createControl(parent);
+
+      initializeDialogUnits(parent);
+      Composite composite = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout();
+      layout.numColumns = 4;
+      composite.setLayout(layout);
+
+      createContainerControls(composite, 4);
+      createPackageControls(composite, 4);
+      createSeparator(composite, 4);
+      createTypeNameControls(composite, 4);
+      createSuperClassControls(composite, 4);
+      createSuperInterfacesControls(composite, 4);
+      // createSeparator(composite, 4);
+
+      setControl(composite);
+
+      setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+      setSuperInterfaces(Arrays
+          .asList(new String[] { "org.apache.hadoop.mapred.Mapper" }), true);
+
+      setFocus();
+      validate();
+    }
+
+    @Override
+    protected void handleFieldChanged(String fieldName) {
+      super.handleFieldChanged(fieldName);
+
+      validate();
+    }
+
+    private void validate() {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    }
+  }
+
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        openResource((IFile) page.getModifiedResource());
+        selectAndReveal(page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return page.getCreatedType().getPrimaryElement();
+  }
+
+}
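
For reference, the stub built by createTypeMembers() above, combined with the superclass and interface defaults from createControl(), yields roughly this generated class (MyMapper stands in for the user-chosen name):

    import java.io.IOException;

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    public class MyMapper extends MapReduceBase implements Mapper {

      public void map(WritableComparable key, Writable values,
          OutputCollector output, Reporter reporter) throws IOException {
      }
    }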

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jdt.core.IJavaElement;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.internal.ui.wizards.NewElementWizard;
+import org.eclipse.jdt.ui.wizards.NewTypeWizardPage;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+
+/**
+ * Wizard for creating a new Reducer class (a class that runs the Reduce portion
+ * of a MapReduce job). The class is pre-filled with a template.
+ * 
+ */
+
+public class NewReducerWizard extends NewElementWizard implements INewWizard,
+    IRunnableWithProgress {
+  private Page page;
+
+  public NewReducerWizard() {
+    setWindowTitle("New Reducer");
+  }
+
+  public void run(IProgressMonitor monitor) {
+    try {
+      page.createType(monitor);
+    } catch (CoreException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      // TODO Auto-generated catch block
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void init(IWorkbench workbench, IStructuredSelection selection) {
+    super.init(workbench, selection);
+
+    page = new Page();
+    addPage(page);
+    page.setSelection(selection);
+  }
+
+  public static class Page extends NewTypeWizardPage {
+    public Page() {
+      super(true, "Reducer");
+
+      setTitle("Reducer");
+      setDescription("Create a new Reducer implementation.");
+      try {
+        setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+            .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+                new Path("resources/reducewiz.png"), null)))));
+      } catch (IOException e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+      }
+
+    }
+
+    public void setSelection(IStructuredSelection selection) {
+      initContainerPage(getInitialJavaElement(selection));
+      initTypePage(getInitialJavaElement(selection));
+    }
+
+    @Override
+    public void createType(IProgressMonitor monitor) throws CoreException,
+        InterruptedException {
+      super.createType(monitor);
+    }
+
+    @Override
+    protected void createTypeMembers(IType newType, ImportsManager imports,
+        IProgressMonitor monitor) throws CoreException {
+      super.createTypeMembers(newType, imports, monitor);
+      imports.addImport("java.io.IOException");
+      imports.addImport("org.apache.hadoop.io.WritableComparable");
+      imports.addImport("org.apache.hadoop.mapred.OutputCollector");
+      imports.addImport("org.apache.hadoop.mapred.Reporter");
+      imports.addImport("java.util.Iterator");
+      newType
+          .createMethod(
+              "public void reduce(WritableComparable _key, Iterator values, OutputCollector output, Reporter reporter) throws IOException \n{\n"
+                  + "\t// replace KeyType with the real type of your key\n"
+                  + "\tKeyType key = (KeyType) _key;\n\n"
+                  + "\twhile (values.hasNext()) {\n"
+                  + "\t\t// replace ValueType with the real type of your value\n"
+                  + "\t\tValueType value = (ValueType) values.next();\n\n"
+                  + "\t\t// process value\n" + "\t}\n" + "}\n", null, false,
+              monitor);
+    }
+
+    public void createControl(Composite parent) {
+      // super.createControl(parent);
+
+      initializeDialogUnits(parent);
+      Composite composite = new Composite(parent, SWT.NONE);
+      GridLayout layout = new GridLayout();
+      layout.numColumns = 4;
+      composite.setLayout(layout);
+
+      createContainerControls(composite, 4);
+      createPackageControls(composite, 4);
+      createSeparator(composite, 4);
+      createTypeNameControls(composite, 4);
+      createSuperClassControls(composite, 4);
+      createSuperInterfacesControls(composite, 4);
+      // createSeparator(composite, 4);
+
+      setControl(composite);
+
+      setSuperClass("org.apache.hadoop.mapred.MapReduceBase", true);
+      setSuperInterfaces(Arrays
+          .asList(new String[] { "org.apache.hadoop.mapred.Reducer" }), true);
+
+      setFocus();
+      validate();
+    }
+
+    @Override
+    protected void handleFieldChanged(String fieldName) {
+      super.handleFieldChanged(fieldName);
+
+      validate();
+    }
+
+    private void validate() {
+      updateStatus(new IStatus[] { fContainerStatus, fPackageStatus,
+          fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus });
+    }
+  }
+
+  @Override
+  public boolean performFinish() {
+    if (super.performFinish()) {
+      if (getCreatedElement() != null) {
+        selectAndReveal(page.getModifiedResource());
+        openResource((IFile) page.getModifiedResource());
+      }
+
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  protected void finishPage(IProgressMonitor monitor)
+      throws InterruptedException, CoreException {
+    this.run(monitor);
+  }
+
+  @Override
+  public IJavaElement getCreatedElement() {
+    return (page.getCreatedType() == null) ? null : page.getCreatedType()
+        .getPrimaryElement();
+  }
+}
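
Likewise, the Reducer wizard generates roughly the following (MyReducer stands in for the user-chosen name; KeyType and ValueType are the template's own placeholders, which the user must replace before the class compiles):

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    public class MyReducer extends MapReduceBase implements Reducer {

      public void reduce(WritableComparable _key, Iterator values,
          OutputCollector output, Reporter reporter) throws IOException {
        // replace KeyType with the real type of your key
        KeyType key = (KeyType) _key;

        while (values.hasNext()) {
          // replace ValueType with the real type of your value
          ValueType value = (ValueType) values.next();

          // process value
        }
      }
    }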

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse;
+
+import java.util.logging.Logger;
+
+/**
+ * Class to help with debugging properties
+ */
+public class PropertyTester extends
+    org.eclipse.core.expressions.PropertyTester {
+
+  static Logger log = Logger.getLogger(PropertyTester.class.getName());
+
+  public PropertyTester() {
+  }
+
+  public boolean test(Object receiver, String property, Object[] args,
+      Object expectedValue) {
+    log.fine("Test property " + property + ", " + receiver.getClass());
+
+    return true;
+
+    // todo(jz) support test for deployable if module has hadoop nature etc.
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DfsAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DfsAction.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DfsAction.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/DfsAction.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.eclipse.dfs.DfsFile;
+import org.apache.hadoop.eclipse.dfs.DfsFolder;
+import org.apache.hadoop.eclipse.dfs.DfsPath;
+import org.eclipse.core.internal.runtime.AdapterManager;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jface.action.IAction;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.swt.widgets.DirectoryDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Shell;
+import org.eclipse.ui.IObjectActionDelegate;
+import org.eclipse.ui.IPersistableElement;
+import org.eclipse.ui.ISharedImages;
+import org.eclipse.ui.IStorageEditorInput;
+import org.eclipse.ui.IWorkbenchPart;
+import org.eclipse.ui.PartInitException;
+import org.eclipse.ui.PlatformUI;
+
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.SftpException;
+
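+/**
+ * Object action delegate implementing the DFS operations (delete, open,
+ * upload, download, refresh) for the paths selected in the DFS browser.
+ */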
+public class DfsAction implements IObjectActionDelegate {
+
+  private ISelection selection;
+
+  private IWorkbenchPart targetPart;
+
+  /** {@inheritDoc} */
+  public void setActivePart(IAction action, IWorkbenchPart targetPart) {
+    this.targetPart = targetPart;
+  }
+
+  /** {@inheritDoc} */
+  public void run(IAction action) {
+
+    // Ignore non-structured selections
+    if (!(this.selection instanceof IStructuredSelection))
+      return;
+
+    IStructuredSelection ss = (IStructuredSelection) selection;
+    String actionId = action.getActionDefinitionId();
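+
+    // Dispatch on the action definition ID (e.g. "dfs.delete") that the
+    // DFS ActionProvider attaches to each menu entry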
+    try {
+      if (actionId.equals("dfs.delete"))
+        delete(ss);
+      else if (actionId.equals("dfs.open"))
+        open(ss);
+      else if (actionId.equals("dfs.put"))
+        put(ss);
+      else if (actionId.equals("dfs.refresh"))
+        refresh(ss);
+      else if (actionId.equals("dfs.get"))
+        get(ss);
+
+    } catch (Exception e) {
+      Shell shell = new Shell();
+      e.printStackTrace();
+      MessageDialog.openError(shell, "DFS Error",
+          "An error occurred while performing the DFS operation: "
+              + e.getMessage());
+    }
+
+  }
+
+  /**
+   * Implements the import action (upload files from the local machine to
+   * HDFS)
+   * 
+   * @param selection the currently selected destination folders
+   * @throws SftpException
+   * @throws JSchException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  private void put(IStructuredSelection selection) throws SftpException,
+      JSchException, InvocationTargetException, InterruptedException {
+
+    // Ask the user which local directory to upload
+    DirectoryDialog dialog =
+        new DirectoryDialog(Display.getCurrent().getActiveShell());
+    dialog.setText("Copy from local directory");
+    dialog.setMessage("Copy the local directory"
+        + " to the selected directories on the distributed filesystem");
+    String directory = dialog.open();
+
+    if (directory == null)
+      return;
+
+    for (DfsFolder folder : filterSelection(DfsFolder.class, selection))
+      folder.put(directory);
+  }
+
+  /**
+   * Implements the download action (copy files and directories from HDFS
+   * to the local machine)
+   * 
+   * @param selection the currently selected paths
+   * @throws SftpException
+   * @throws JSchException
+   */
+  private void get(IStructuredSelection selection) throws SftpException,
+      JSchException {
+
+    // Ask the user where to put the downloaded files
+    DirectoryDialog dialog =
+        new DirectoryDialog(Display.getCurrent().getActiveShell());
+    dialog.setText("Copy to local directory");
+    dialog.setMessage("Copy the selected files and directories from the "
+        + "distributed filesystem to a local directory");
+    String directory = dialog.open();
+
+    if (directory == null)
+      return;
+
+    for (DfsPath path : filterSelection(DfsPath.class, selection)) {
+      try {
+        path.downloadToLocalDirectory(directory);
+      } catch (Exception e) {
+        // Log the failure and continue with the remaining paths
+        e.printStackTrace();
+      }
+    }
+
+  }
+
+  /**
+   * Opens each selected DfsFile in a read-only editor window
+   * 
+   * @param selection
+   * @throws JSchException
+   * @throws IOException
+   * @throws PartInitException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   */
+  private void open(IStructuredSelection selection) throws JSchException,
+      IOException, PartInitException, InvocationTargetException,
+      InterruptedException {
+
+    for (final DfsFile path : filterSelection(DfsFile.class, selection)) {
+
+      final InputStream data = path.open();
+      if (data == null)
+        continue;
+
+      final IStorage storage = new IStorage() {
+        public Object getAdapter(Class adapter) {
+          return AdapterManager.getDefault().getAdapter(this, adapter);
+        }
+
+        public boolean isReadOnly() {
+          return true;
+        }
+
+        public String getName() {
+          return path.toString();
+        }
+
+        public IPath getFullPath() {
+          return new Path(path.toString());
+        }
+
+        public InputStream getContents() throws CoreException {
+          return data;
+        }
+      };
+
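+      // Wrap the read-only storage in an editor input so the DFS file can
+      // be displayed by the standard text editor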
+      IStorageEditorInput storageEditorInput = new IStorageEditorInput() {
+        public Object getAdapter(Class adapter) {
+          return null;
+        }
+
+        public String getToolTipText() {
+          return "";
+        }
+
+        public IPersistableElement getPersistable() {
+          return null;
+        }
+
+        public String getName() {
+          return path.toString();
+        }
+
+        public ImageDescriptor getImageDescriptor() {
+          return PlatformUI.getWorkbench().getSharedImages()
+              .getImageDescriptor(ISharedImages.IMG_OBJ_FILE);
+        }
+
+        public boolean exists() {
+          return true;
+        }
+
+        public IStorage getStorage() throws CoreException {
+          return storage;
+        }
+      };
+
+      targetPart.getSite().getWorkbenchWindow().getActivePage().openEditor(
+          storageEditorInput, "org.eclipse.ui.DefaultTextEditor");
+    }
+  }
+
+  private void refresh(IStructuredSelection selection) throws JSchException {
+    for (DfsPath path : filterSelection(DfsPath.class, selection))
+      path.refresh();
+
+  }
+
+  private void delete(IStructuredSelection selection) throws JSchException {
+    List<DfsPath> list = filterSelection(DfsPath.class, selection);
+    if (list.isEmpty())
+      return;
+
+    if (MessageDialog.openConfirm(null, "Confirm Delete from DFS",
+        "Are you sure you want to delete " + list + " from the DFS?")) {
+      for (DfsPath path : list)
+        path.delete();
+    }
+  }
+
+  /** {@inheritDoc} */
+  public void selectionChanged(IAction action, ISelection selection) {
+    this.selection = selection;
+  }
+
+  /**
+   * Extracts the elements of type {@code T} from the structured selection.
+   * 
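+   * For instance, {@code filterSelection(DfsFile.class, selection)}, as used
+   * by the open action above, keeps only the DfsFile elements.
+   * 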
+   * @param clazz the class object for T
+   * @param selection the structured selection
+   * @return the list of elements of type {@code T} it contains
+   */
+  private <T> List<T> filterSelection(Class<T> clazz,
+      IStructuredSelection selection) {
+    List<T> list = new ArrayList<T>();
+    for (Object obj : selection.toList()) {
+      if (clazz.isInstance(obj)) {
+        list.add(clazz.cast(obj));
+      }
+    }
+    return list;
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditServerAction.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.io.IOException;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.server.HadoopServer;
+import org.apache.hadoop.eclipse.servers.DefineHadoopServerLocWizardPage;
+import org.apache.hadoop.eclipse.view.servers.ServerView;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+/**
+ * Editing server properties action
+ */
+public class EditServerAction extends Action {
+
+  private ServerView serverView;
+
+  public EditServerAction(ServerView serverView) {
+    this.serverView = serverView;
+
+    setText("Edit Hadoop Server");
+    try {
+      // TODO Edit server icon
+      setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+          .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+              new Path("resources/hadoop_small.gif"), null)))));
+    } catch (IOException e) {
+      /* Ignore if no image */
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void run() {
+
+    final HadoopServer server = serverView.getSelectedServer();
+    if (server == null)
+      return;
+
+    WizardDialog dialog = new WizardDialog(null, new Wizard() {
+      private DefineHadoopServerLocWizardPage page =
+          new DefineHadoopServerLocWizardPage(server);
+
+      @Override
+      public void addPages() {
+        super.addPages();
+        setWindowTitle("Edit Hadoop Server Location");
+        addPage(page);
+      }
+
+      @Override
+      public boolean performFinish() {
+        return (page.performFinish() != null);
+      }
+    });
+
+    dialog.create();
+    dialog.setBlockOnOpen(true);
+    dialog.open();
+
+    super.run();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewServerAction.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.io.IOException;
+
+import org.apache.hadoop.eclipse.Activator;
+import org.apache.hadoop.eclipse.servers.DefineHadoopServerLocWizardPage;
+import org.eclipse.core.runtime.FileLocator;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardDialog;
+
+
+/**
+ * Action corresponding to creating a new MapReduce Server.
+ */
+
+public class NewServerAction extends Action {
+  public NewServerAction() {
+    setText("New Hadoop Server");
+    try {
+      // TODO decorate with + sign to indicate create
+      setImageDescriptor(ImageDescriptor.createFromURL((FileLocator
+          .toFileURL(FileLocator.find(Activator.getDefault().getBundle(),
+              new Path("resources/hadoop_small.gif"), null)))));
+    } catch (IOException e) {
+      /* Ignore if no image */
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void run() {
+    WizardDialog dialog = new WizardDialog(null, new Wizard() {
+      private DefineHadoopServerLocWizardPage page = new DefineHadoopServerLocWizardPage();
+
+      @Override
+      public void addPages() {
+        super.addPages();
+        setWindowTitle("New Hadoop Server Location");
+        addPage(page);
+      }
+
+      @Override
+      public boolean performFinish() {
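+        // A non-null result from the wizard page means the new server
+        // location was successfully created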
+        return page.performFinish() != null;
+      }
+
+    });
+
+    dialog.create();
+    dialog.setBlockOnOpen(true);
+    dialog.open();
+
+    super.run();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import java.util.logging.Logger;
+
+import org.apache.hadoop.eclipse.NewDriverWizard;
+import org.apache.hadoop.eclipse.NewMapperWizard;
+import org.apache.hadoop.eclipse.NewReducerWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.viewers.StructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.ui.INewWizard;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.cheatsheets.ICheatSheetAction;
+import org.eclipse.ui.cheatsheets.ICheatSheetManager;
+
+
+/**
+ * Cheat sheet action that opens the wizard for creating a new MapReduce
+ * class (Mapper, Reducer, or Driver).
+ */
+
+public class OpenNewMRClassWizardAction extends Action implements
+    ICheatSheetAction {
+
+  static Logger log = Logger.getLogger(OpenNewMRClassWizardAction.class
+      .getName());
+
+  public void run(String[] params, ICheatSheetManager manager) {
+
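+    // params[0] names the kind of wizard to open ("Mapper", "Reducer" or
+    // "Driver"), as provided by the cheat sheet action declaration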
+    if ((params != null) && (params.length > 0)) {
+      IWorkbench workbench = PlatformUI.getWorkbench();
+      INewWizard wizard = getWizard(params[0]);
+      if (wizard == null)
+        return; // unknown wizard type; getWizard() has already logged it
+      wizard.init(workbench, new StructuredSelection());
+      WizardDialog dialog = new WizardDialog(PlatformUI.getWorkbench()
+          .getActiveWorkbenchWindow().getShell(), wizard);
+      dialog.create();
+      dialog.open();
+
+      // did the wizard succeed?
+      notifyResult(dialog.getReturnCode() == Window.OK);
+    }
+  }
+
+  private INewWizard getWizard(String typeName) {
+    if (typeName.equals("Mapper")) {
+      return new NewMapperWizard();
+    } else if (typeName.equals("Reducer")) {
+      return new NewReducerWizard();
+    } else if (typeName.equals("Driver")) {
+      return new NewDriverWizard();
+    } else {
+      log.severe("Invalid Wizard requested");
+      return null;
+    }
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRProjectAction.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.NewMapReduceProjectWizard;
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.viewers.StructuredSelection;
+import org.eclipse.jface.window.Window;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Shell;
+import org.eclipse.ui.IWorkbench;
+import org.eclipse.ui.PlatformUI;
+
+
+/**
+ * Action to open the wizard for creating a new MapReduce project.
+ */
+
+public class OpenNewMRProjectAction extends Action {
+
+  @Override
+  public void run() {
+    IWorkbench workbench = PlatformUI.getWorkbench();
+    Shell shell = workbench.getActiveWorkbenchWindow().getShell();
+    NewMapReduceProjectWizard wizard = new NewMapReduceProjectWizard();
+    wizard.init(workbench, new StructuredSelection());
+    WizardDialog dialog = new WizardDialog(shell, wizard);
+    dialog.create();
+    dialog.open();
+    // did the wizard succeed?
+    notifyResult(dialog.getReturnCode() == Window.OK);
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/RunOnHadoopActionDelegate.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/RunOnHadoopActionDelegate.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/RunOnHadoopActionDelegate.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/RunOnHadoopActionDelegate.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.actions;
+
+import org.apache.hadoop.eclipse.server.JarModule;
+import org.apache.hadoop.eclipse.servers.RunOnHadoopWizard;
+import org.eclipse.core.resources.IResource;
+import org.eclipse.core.runtime.IAdaptable;
+import org.eclipse.jface.action.IAction;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.jface.wizard.IWizard;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Shell;
+import org.eclipse.ui.actions.ActionDelegate;
+
+/**
+ * Associates a resource with the "Run on Hadoop" action in the Run menu.
+ * Only files, not directories, may be run on Hadoop, and the file needs to
+ * have a main method. When the "Run on Hadoop" action is invoked, the Run
+ * on Hadoop wizard dialog is launched.
+ */
+
+public class RunOnHadoopActionDelegate extends ActionDelegate {
+
+  private ISelection selection;
+
+  @Override
+  public void selectionChanged(IAction action, ISelection selection) {
+    this.selection = selection;
+  }
+
+  @Override
+  public void run(IAction action) {
+    if ((selection == null)
+        || (!(selection instanceof IStructuredSelection)))
+      return;
+
+    IStructuredSelection issel = (IStructuredSelection) selection;
+
+    if (issel.size() != 1)
+      return;
+
+    Object selected = issel.getFirstElement();
+    if (!(selected instanceof IAdaptable))
+      return;
+
+    IAdaptable adaptable = (IAdaptable) selected;
+
+    IResource resource = (IResource) adaptable.getAdapter(IResource.class);
+
+    // 63561: only allow run-on on file resources
+    if ((resource != null) && (resource.getType() == IResource.FILE)) {
+      RunOnHadoopWizard wizard =
+          new RunOnHadoopWizard(new JarModule(resource));
+
+      WizardDialog dialog = new Dialog(null, wizard);
+      dialog.create();
+      dialog.setBlockOnOpen(true);
+      dialog.open();
+
+      return;
+    }
+
+    MessageDialog
+        .openInformation(Display.getDefault().getActiveShell(),
+            "No main method found",
+            "Please select a file with a main method to run on a MapReduce server");
+  }
+
+  static class Dialog extends WizardDialog {
+    public Dialog(Shell parentShell, IWizard newWizard) {
+      super(parentShell, newWizard);
+    }
+
+    @Override
+    public void create() {
+      super.create();
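+
+      // Give the wizard access to the dialog's progress monitor so it can
+      // report progress while the wizard runs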
+
+      ((RunOnHadoopWizard) getWizard())
+          .setProgressMonitor(getProgressMonitor());
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java?view=auto&rev=566838
==============================================================================
--- lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java (added)
+++ lucene/hadoop/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java Thu Aug 16 13:43:12 2007
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.eclipse.dfs;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.eclipse.jface.action.Action;
+import org.eclipse.jface.action.IMenuManager;
+import org.eclipse.jface.resource.ImageDescriptor;
+import org.eclipse.jface.viewers.ISelection;
+import org.eclipse.jface.viewers.IStructuredSelection;
+import org.eclipse.ui.IActionBars;
+import org.eclipse.ui.ISharedImages;
+import org.eclipse.ui.PlatformUI;
+import org.eclipse.ui.actions.ActionFactory;
+import org.eclipse.ui.navigator.CommonActionProvider;
+import org.eclipse.ui.navigator.ICommonActionConstants;
+import org.eclipse.ui.navigator.ICommonActionExtensionSite;
+import org.eclipse.ui.navigator.ICommonMenuConstants;
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+
+/**
+ * Contributes the DFS browser actions (view, delete, refresh, upload and
+ * download) for the items selected in the DFS tree
+ */
+
+public class ActionProvider extends CommonActionProvider {
+
+  private ICommonActionExtensionSite site;
+
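+  // Icons for the DFS actions, keyed by action definition ID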
+  private Map<String, ImageDescriptor> descriptors =
+      new HashMap<String, ImageDescriptor>();
+
+  public ActionProvider() {
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void init(ICommonActionExtensionSite site) {
+    super.init(site);
+    this.site = site;
+
+    descriptors
+        .put("dfs.delete", PlatformUI.getWorkbench().getSharedImages()
+            .getImageDescriptor(ISharedImages.IMG_TOOL_DELETE));
+    descriptors.put("dfs.refresh", AbstractUIPlugin
+        .imageDescriptorFromPlugin("org.eclipse.core.tools.resources",
+            "icons/refresh.gif"));
+    // NOTE(jz)
+    // pretty brittle, but worst case no image
+    // descriptors.put("dfs.put",
+    // NavigatorPlugin.imageDescriptorFromPlugin("org.eclipse.core.tools.resources",
+    // "icons/refresh.gif"));
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void fillActionBars(IActionBars actionBars) {
+    actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
+        new DfsAction("dfs.delete", "Delete"));
+    actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
+        new DfsAction("dfs.refresh", "Refresh"));
+
+    if ((this.site != null)
+        && (this.site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
+        && (((IStructuredSelection) this.site.getStructuredViewer()
+            .getSelection()).size() == 1)
+        && (((IStructuredSelection) this.site.getStructuredViewer()
+            .getSelection()).getFirstElement() instanceof DfsFile)) {
+      actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
+          new DfsAction("dfs.open", "View"));
+    }
+
+    actionBars.updateActionBars();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void fillContextMenu(IMenuManager menu) {
+    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DfsAction(
+        "dfs.delete", "Delete"));
+    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DfsAction(
+        "dfs.refresh", "Refresh"));
+
+    menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DfsAction(
+        "dfs.get", "Download to local directory..."));
+
+    if (this.site == null)
+      return;
+
+    ISelection isel = this.site.getStructuredViewer().getSelection();
+    if (!(isel instanceof IStructuredSelection))
+      return;
+
+    IStructuredSelection issel = (IStructuredSelection) isel;
+    if (issel.size() != 1)
+      return;
+
+    Object element = issel.getFirstElement();
+
+    if (element instanceof DfsFile) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DfsAction(
+          "dfs.open", "View"));
+
+    } else if (element instanceof DfsFolder) {
+      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DfsAction(
+          "dfs.put", "Import from local directory..."));
+    }
+  }
+
+  /**
+   * Lightweight navigator action that forwards each DFS command to the
+   * {@link org.apache.hadoop.eclipse.actions.DfsAction} delegate
+   */
+  public class DfsAction extends Action {
+
+    private final String actionDefinition;
+
+    private final String title;
+
+    public DfsAction(String actionDefinition, String title) {
+      this.actionDefinition = actionDefinition;
+      this.title = title;
+
+    }
+
+    @Override
+    public String getText() {
+      return this.title;
+    }
+
+    @Override
+    public ImageDescriptor getImageDescriptor() {
+      // May be null if no icon was registered for this action ID
+      return descriptors.get(getActionDefinitionId());
+    }
+
+    @Override
+    public String getActionDefinitionId() {
+      return actionDefinition;
+    }
+
+    @Override
+    public void run() {
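+      // Forward to the shared DfsAction delegate with the active workbench
+      // part and the current tree selection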
+      org.apache.hadoop.eclipse.actions.DfsAction action =
+          new org.apache.hadoop.eclipse.actions.DfsAction();
+      action.setActivePart(this, PlatformUI.getWorkbench()
+          .getActiveWorkbenchWindow().getActivePage().getActivePart());
+      action.selectionChanged(this, site.getStructuredViewer()
+          .getSelection());
+      action.run(this);
+    }
+
+    @Override
+    public boolean isEnabled() {
+      return true;
+    }
+  }
+}


