ambari-commits mailing list archives

From rle...@apache.org
Subject [1/2] ambari git commit: AMBARI-11391. Files View Should support NameNode HA (Erik Bergenholtz via rlevas)
Date Wed, 27 May 2015 18:27:56 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 1b4bfafad -> e28a9c073


http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
new file mode 100644
index 0000000..cc329d9
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -0,0 +1,451 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.*;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.json.simple.JSONArray;
+
+/**
+ * Hdfs Business Delegate
+ */
+public class HdfsApi {
+  private final Configuration conf;
+  private final Map<String, String> authParams;
+
+  private FileSystem fs;
+  private UserGroupInformation ugi;
+
+  /**
+   * Constructor
+   * @param configurationBuilder HDFS configuration builder
+   * @param username name of the user to proxy as
+   * @param authParams authentication parameters builder
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws HdfsApiException
+   */
+  public HdfsApi(ConfigurationBuilder configurationBuilder, String username,
+      AuthConfigurationBuilder authParams) throws IOException,
+      InterruptedException, HdfsApiException {
+    this.authParams = authParams.build();
+    conf = configurationBuilder.build();
+
+    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
+
+    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      public FileSystem run() throws IOException {
+        return FileSystem.get(conf);
+      }
+    });
+  }
+
+  private UserGroupInformation getProxyUser() throws IOException {
+    UserGroupInformation proxyuser;
+    if (authParams.containsKey("proxyuser")) {
+      proxyuser = UserGroupInformation.createRemoteUser(authParams.get("proxyuser"));
+    } else {
+      proxyuser = UserGroupInformation.getCurrentUser();
+    }
+
+    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
+    return proxyuser;
+  }
+
+  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
+    UserGroupInformation.AuthenticationMethod authMethod;
+    if (authParams.containsKey("auth")) {
+      String authName = authParams.get("auth");
+      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(authName.toUpperCase());
+    } else {
+      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
+    }
+    return authMethod;
+  }
+
+  /**
+   * List dir operation
+   * @param path path
+   * @return array of FileStatus objects
+   * @throws FileNotFoundException
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FileStatus[] listdir(final String path) throws FileNotFoundException,
+      IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
+      public FileStatus[] run() throws Exception {
+        return fs.listStatus(new Path(path));
+      }
+    });
+  }
+
+  /**
+   * Get file status
+   * @param path path
+   * @return file status
+   * @throws IOException
+   * @throws FileNotFoundException
+   * @throws InterruptedException
+   */
+  public FileStatus getFileStatus(final String path) throws IOException,
+      FileNotFoundException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
+      public FileStatus run() throws FileNotFoundException, IOException {
+        return fs.getFileStatus(new Path(path));
+      }
+    });
+  }
+
+  /**
+   * Make directory
+   * @param path path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean mkdir(final String path) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.mkdirs(new Path(path));
+      }
+    });
+  }
+
+  /**
+   * Rename
+   * @param src source path
+   * @param dst destination path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean rename(final String src, final String dst) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.rename(new Path(src), new Path(dst));
+      }
+    });
+  }
+
+  /**
+   * Check whether trash is enabled
+   * @return true if trash is enabled
+   * @throws Exception
+   */
+  public boolean trashEnabled() throws Exception {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws IOException {
+        Trash tr = new Trash(fs, conf);
+        return tr.isEnabled();
+      }
+    });
+  }
+
+  /**
+   * Home directory
+   * @return home directory
+   * @throws Exception
+   */
+  public Path getHomeDir() throws Exception {
+    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+      public Path run() throws IOException {
+        return fs.getHomeDirectory();
+      }
+    });
+  }
+
+  /**
+   * Hdfs Status
+   * @return file system status
+   * @throws Exception
+   */
+  public synchronized FsStatus getStatus() throws Exception {
+    return ugi.doAs(new PrivilegedExceptionAction<FsStatus>() {
+      public FsStatus run() throws IOException {
+        return fs.getStatus();
+      }
+    });
+  }
+
+  /**
+   * Trash directory
+   * @return trash directory
+   * @throws Exception
+   */
+  public Path getTrashDir() throws Exception {
+    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+      public Path run() throws IOException {
+        TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs,
+            fs.getHomeDirectory());
+        return trashPolicy.getCurrentTrashDir().getParent();
+      }
+    });
+  }
+
+  /**
+   * Trash directory path.
+   *
+   * @return trash directory path
+   * @throws Exception
+   */
+  public String getTrashDirPath() throws Exception {
+    Path trashDir = getTrashDir();
+
+    return trashDir.toUri().getRawPath();
+  }
+
+  /**
+   * Trash directory path.
+   *
+   * @param    filePath        the path to the file
+   * @return trash directory path for the file
+   * @throws Exception
+   */
+  public String getTrashDirPath(String filePath) throws Exception {
+    String trashDirPath = getTrashDirPath();
+
+    Path path = new Path(filePath);
+    trashDirPath = trashDirPath + "/" + path.getName();
+
+    return trashDirPath;
+  }
+
+  /**
+   * Empty trash
+   * @return null (the privileged action has no result)
+   * @throws Exception
+   */
+  public Void emptyTrash() throws Exception {
+    return ugi.doAs(new PrivilegedExceptionAction<Void>() {
+      public Void run() throws IOException {
+        Trash tr = new Trash(fs, conf);
+        tr.expunge();
+        return null;
+      }
+    });
+  }
+
+  /**
+   * Move to trash
+   * @param path path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean moveToTrash(final String path) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return Trash.moveToAppropriateTrash(fs, new Path(path), conf);
+      }
+    });
+  }
+
+  /**
+   * Delete
+   * @param path path
+   * @param recursive delete recursive
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean delete(final String path, final boolean recursive)
+      throws IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.delete(new Path(path), recursive);
+      }
+    });
+  }
+
+  /**
+   * Create file
+   * @param path path
+   * @param overwrite overwrite an existing file
+   * @return output stream
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FSDataOutputStream create(final String path, final boolean overwrite)
+      throws IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
+      public FSDataOutputStream run() throws Exception {
+        return fs.create(new Path(path), overwrite);
+      }
+    });
+  }
+
+  /**
+   * Open file
+   * @param path path
+   * @return input stream
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FSDataInputStream open(final String path) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
+      public FSDataInputStream run() throws Exception {
+        return fs.open(new Path(path));
+      }
+    });
+  }
+
+  /**
+   * Change permissions
+   * @param path path
+   * @param permissions permissions in format rwxrwxrwx
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean chmod(final String path, final String permissions) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        try {
+          fs.setPermission(new Path(path), FsPermission.valueOf(permissions));
+        } catch (Exception ex) {
+          return false;
+        }
+        return true;
+      }
+    });
+  }
+
+  /**
+   * Copy file
+   * @param src source path
+   * @param dest destination path
+   * @throws java.io.IOException
+   * @throws InterruptedException
+   */
+  public synchronized void copy(final String src, final String dest) throws IOException,
+      InterruptedException, HdfsApiException {
+    boolean result = ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return FileUtil.copy(fs, new Path(src), fs, new Path(dest), false, conf);
+      }
+    });
+
+    if (!result) {
+      throw new HdfsApiException("HDFS010 Can't copy source file from \" + src + \" to \"
+ dest");
+    }
+  }
+
+  /**
+   * Check if the path exists
+   * @param newFilePath path
+   * @return true if the path exists
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public synchronized boolean exists(final String newFilePath) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.exists(new Path(newFilePath));
+      }
+    });
+  }
+
+  /**
+   * Converts a Hadoop permission into a Unix permission symbolic representation
+   * (i.e. -rwxr--r--) or default if the permission is NULL.
+   *
+   * @param p
+   *          Hadoop permission.
+   * @return the Unix permission symbolic representation or default if the
+   *         permission is NULL.
+   */
+  private static String permissionToString(FsPermission p) {
+    return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
+        + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
+  }
+
+  /**
+   * Converts a Hadoop <code>FileStatus</code> object into a JSON object.
+   * The <code>SCHEME://HOST:PORT</code> prefix is stripped from the path.
+   * <p/>
+   *
+   * @param status
+   *          Hadoop file status.
+   * @return The JSON representation of the file status.
+   */
+  public Map<String, Object> fileStatusToJSON(FileStatus status) {
+    Map<String, Object> json = new LinkedHashMap<String, Object>();
+    json.put("path", Path.getPathWithoutSchemeAndAuthority(status.getPath())
+        .toString());
+    json.put("replication", status.getReplication());
+    json.put("isDirectory", status.isDirectory());
+    json.put("len", status.getLen());
+    json.put("owner", status.getOwner());
+    json.put("group", status.getGroup());
+    json.put("permission", permissionToString(status.getPermission()));
+    json.put("accessTime", status.getAccessTime());
+    json.put("modificationTime", status.getModificationTime());
+    json.put("blockSize", status.getBlockSize());
+    json.put("replication", status.getReplication());
+    json.put("readAccess", checkAccessPermissions(status, FsAction.READ, ugi));
+    json.put("writeAccess", checkAccessPermissions(status, FsAction.WRITE, ugi));
+    json.put("executeAccess", checkAccessPermissions(status, FsAction.EXECUTE, ugi));
+    return json;
+  }
+
+  /**
+   * Converts a Hadoop <code>FileStatus</code> array into a JSON array.
+   * The <code>SCHEME://HOST:PORT</code> prefix is stripped from each path.
+   * <p/>
+   *
+   * @param status
+   *          Hadoop file status array.
+   * @return The JSON representation of the file status array.
+   */
+  @SuppressWarnings("unchecked")
+  public JSONArray fileStatusToJSON(FileStatus[] status) {
+    JSONArray json = new JSONArray();
+    if (status != null) {
+      for (FileStatus s : status) {
+        json.add(fileStatusToJSON(s));
+      }
+    }
+    return json;
+  }
+
+  public static boolean checkAccessPermissions(FileStatus stat, FsAction mode,
+      UserGroupInformation ugi) {
+    FsPermission perm = stat.getPermission();
+    String user = ugi.getShortUserName();
+    List<String> groups = Arrays.asList(ugi.getGroupNames());
+    if (user.equals(stat.getOwner())) {
+      if (perm.getUserAction().implies(mode)) {
+        return true;
+      }
+    } else if (groups.contains(stat.getGroup())) {
+      if (perm.getGroupAction().implies(mode)) {
+        return true;
+      }
+    } else {
+      if (perm.getOtherAction().implies(mode)) {
+        return true;
+      }
+    }
+    return false;
+  }
+}
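
Every public operation on HdfsApi funnels through ugi.doAs(...), so each filesystem call runs as the proxied end user rather than the view's own process identity. A minimal usage sketch with illustrative wiring only (the "ambari-qa" username and the wrapper class name are placeholders; views normally obtain an HdfsApi through HdfsUtil.connectToHDFSApi, added later in this patch):

    package org.apache.ambari.view.utils.hdfs;

    import org.apache.hadoop.fs.FileStatus;
    import org.json.simple.JSONArray;

    public class HdfsApiUsageSketch {
      static JSONArray listHomeAsJson(ConfigurationBuilder configurationBuilder,
                                      AuthConfigurationBuilder authConfigurationBuilder)
          throws Exception {
        // "ambari-qa" is a placeholder; a view would pass the logged-in user here.
        HdfsApi api = new HdfsApi(configurationBuilder, "ambari-qa", authConfigurationBuilder);
        String home = api.getHomeDir().toString();   // executes under ugi.doAs
        FileStatus[] entries = api.listdir(home);    // also proxied
        return api.fileStatusToJSON(entries);        // includes read/write/execute flags
      }
    }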

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApiException.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApiException.java
b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApiException.java
new file mode 100644
index 0000000..896bad4
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApiException.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+public class HdfsApiException extends Exception {
+  public HdfsApiException(String message) {
+    super(message);
+  }
+
+  public HdfsApiException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
new file mode 100644
index 0000000..12226ce
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class HdfsUtil {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(HdfsUtil.class);
+
+  /**
+   * Write string to file with overwriting
+   * @param hdfs HdfsApi connection
+   * @param filePath path to file
+   * @param content new content of file
+   * @throws HdfsApiException
+   */
+  public static void putStringToFile(HdfsApi hdfs, String filePath, String content)
+      throws HdfsApiException {
+    FSDataOutputStream stream = null;
+    try {
+      synchronized (hdfs) {
+        stream = hdfs.create(filePath, true);
+        stream.writeBytes(content);
+      }
+    } catch (IOException e) {
+      throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
+    } catch (InterruptedException e) {
+      throw new HdfsApiException("HDFS021 Could not write file " + filePath, e);
+    } finally {
+      IOUtils.closeQuietly(stream); // close the stream even if the write fails
+    }
+  }
+
+  /**
+   * Read string from file
+   * @param hdfs HdfsApi connection
+   * @param filePath path to file
+   * @return file content
+   * @throws HdfsApiException
+   */
+  public static String readFile(HdfsApi hdfs, String filePath) throws HdfsApiException {
+    FSDataInputStream stream = null;
+    try {
+      stream = hdfs.open(filePath);
+      return IOUtils.toString(stream);
+    } catch (IOException e) {
+      throw new HdfsApiException("HDFS060 Could not read file " + filePath, e);
+    } catch (InterruptedException e) {
+      throw new HdfsApiException("HDFS061 Could not read file " + filePath, e);
+    } finally {
+      IOUtils.closeQuietly(stream); // avoid leaking the input stream
+    }
+  }
+
+
+  /**
+   * Increment an index appended to the filename until an unallocated file name is found
+   * @param hdfs HdfsApi connection
+   * @param fullPathAndFilename path to file and prefix for filename
+   * @param extension file extension
+   * @return free filename; if fullPathAndFilename="/tmp/file" and extension=".txt", the result looks like "/tmp/file_42.txt"
+   * @throws HdfsApiException
+   */
+  public static String findUnallocatedFileName(HdfsApi hdfs, String fullPathAndFilename, String extension)
+      throws HdfsApiException {
+    int triesCount = 0;
+    String newFilePath;
+    boolean isUnallocatedFilenameFound;
+
+    try {
+      do {
+        newFilePath = String.format(fullPathAndFilename + "%s" + extension, (triesCount == 0) ? "" : "_" + triesCount);
+        LOG.debug("Trying to find free filename " + newFilePath);
+
+        isUnallocatedFilenameFound = !hdfs.exists(newFilePath);
+        if (isUnallocatedFilenameFound) {
+          LOG.debug("File created successfully!");
+        }
+
+        triesCount += 1;
+        if (triesCount > 1000) {
+          throw new HdfsApiException("HDFS100 Can't find unallocated file name " + fullPathAndFilename
+ "...");
+        }
+      } while (!isUnallocatedFilenameFound);
+    } catch (IOException e) {
+      throw new HdfsApiException("HDFS030 Error in creation " + fullPathAndFilename + "...", e);
+    } catch (InterruptedException e) {
+      throw new HdfsApiException("HDFS031 Error in creation " + fullPathAndFilename + "...", e);
+    }
+
+    return newFilePath;
+  }
+
+  /**
+   * Factory of HdfsApi for specific ViewContext
+   * @param context ViewContext that contains connection credentials
+   * @return HdfsApi object
+   */
+  public static synchronized HdfsApi connectToHDFSApi(ViewContext context) throws HdfsApiException {
+    HdfsApi api = null;
+    Thread.currentThread().setContextClassLoader(null);
+
+    ConfigurationBuilder configurationBuilder = new ConfigurationBuilder(context);
+    AuthConfigurationBuilder authConfigurationBuilder = new AuthConfigurationBuilder(context);
+
+    try {
+      api = new HdfsApi(configurationBuilder, getHdfsUsername(context), authConfigurationBuilder);
+      LOG.info("HdfsApi connected OK");
+    } catch (IOException e) {
+      String message = "HDFS040 Couldn't open connection to HDFS";
+      LOG.error(message);
+      throw new HdfsApiException(message, e);
+    } catch (InterruptedException e) {
+      String message = "HDFS041 Couldn't open connection to HDFS";
+      LOG.error(message);
+      throw new HdfsApiException(message, e);
+    }
+    return api;
+  }
+
+  /**
+   * Returns the username for HdfsApi from the "webhdfs.username" property if set;
+   * if not set, falls back to the current Ambari username
+   * @param context ViewContext
+   * @return username
+   */
+  public static String getHdfsUsername(ViewContext context) {
+    String userName = context.getProperties().get("webhdfs.username");
+    if (userName == null || userName.compareTo("null") == 0 || userName.compareTo("") == 0) {
+      userName = context.getUsername();
+    }
+    return userName;
+  }
+}
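
Together, connectToHDFSApi, findUnallocatedFileName, and putStringToFile cover the "write a new file without clobbering an existing one" flow. A sketch, assuming the ViewContext carries working HDFS connection properties (the /tmp/report path is illustrative):

    package org.apache.ambari.view.utils.hdfs;

    import org.apache.ambari.view.ViewContext;

    public class HdfsUtilUsageSketch {
      static String saveReport(ViewContext context, String content) throws HdfsApiException {
        HdfsApi hdfs = HdfsUtil.connectToHDFSApi(context);
        // Probes /tmp/report.txt, /tmp/report_1.txt, ... and returns the first free name.
        String path = HdfsUtil.findUnallocatedFileName(hdfs, "/tmp/report", ".txt");
        HdfsUtil.putStringToFile(hdfs, path, content); // overwrite flag is moot: name is free
        return path;
      }
    }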

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/RemoteClusterTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/RemoteClusterTest.java
b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/RemoteClusterTest.java
new file mode 100644
index 0000000..ec4df7d
--- /dev/null
+++ b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/RemoteClusterTest.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+import org.apache.ambari.view.URLStreamProvider;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.cluster.Cluster;
+import org.apache.commons.collections4.map.PassiveExpiringMap;
+import org.easymock.IAnswer;
+import org.json.simple.JSONObject;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+public class RemoteClusterTest {
+  public static final String AMBARI_CLUSTER_REST_URL = "http://example.com:8080/api/v1/clusters/c1";
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void testGetRemoteClusterThatIsNotPresent() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    Map<String, String> instanceProperties = new HashMap<String, String>();
+    expect(viewContext.getProperties()).andReturn(instanceProperties).anyTimes();
+    replay(viewContext);
+
+    AmbariApi ambariApi = new AmbariApi(viewContext);
+    Cluster cluster = ambariApi.getRemoteCluster();
+    assertNull(cluster);
+  }
+
+  @Test
+  public void testGetRemoteClusterNoCredentials() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    Map<String, String> instanceProperties = new HashMap<String, String>();
+    instanceProperties.put(AmbariApi.AMBARI_SERVER_URL_INSTANCE_PROPERTY,
+        AMBARI_CLUSTER_REST_URL);
+    expect(viewContext.getProperties()).andReturn(instanceProperties).anyTimes();
+    replay(viewContext);
+
+    thrown.expect(AmbariApiException.class);
+    AmbariApi ambariApi = new AmbariApi(viewContext);
+    Cluster cluster = ambariApi.getRemoteCluster();
+  }
+
+  @Test
+  public void testGetRemoteClusterThatIsPresent() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    Map<String, String> instanceProperties = new HashMap<String, String>();
+    instanceProperties.put(AmbariApi.AMBARI_SERVER_URL_INSTANCE_PROPERTY,
+        AMBARI_CLUSTER_REST_URL);
+    instanceProperties.put(AmbariApi.AMBARI_SERVER_USERNAME_INSTANCE_PROPERTY, "admin");
+    instanceProperties.put(AmbariApi.AMBARI_SERVER_PASSWORD_INSTANCE_PROPERTY, "admin");
+    expect(viewContext.getProperties()).andReturn(instanceProperties).anyTimes();
+    replay(viewContext);
+
+    AmbariApi ambariApi = new AmbariApi(viewContext);
+    Cluster cluster = ambariApi.getRemoteCluster();
+    assertNotNull(cluster);
+    assertEquals(cluster.getName(), "c1");
+  }
+
+  @Test
+  public void testGetConfigurationValue() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+
+    final String desiredConfigsString = "{\"Clusters\": {\"desired_configs\": {\"test-site\": {\"tag\": \"TAG\"}}}}";
+    final String configurationString = "{\"items\": [{\"properties\": {\"test.property.name\": \"test property value\"}}]}";
+    final int[] desiredConfigPolls = {0};
+    final int[] testConfigPolls = {0};
+
+    expect(urlStreamProvider.readFrom(eq(AMBARI_CLUSTER_REST_URL + "?fields=services/ServiceInfo,hosts,Clusters"),
+        eq("GET"), (String) isNull(), (Map<String, String>) anyObject())).andAnswer(new
IAnswer<InputStream>() {
+      @Override
+      public InputStream answer() throws Throwable {
+        desiredConfigPolls[0] += 1;
+        return new ByteArrayInputStream(desiredConfigsString.getBytes());
+      }
+    }).anyTimes();
+
+    expect(urlStreamProvider.readFrom(eq(AMBARI_CLUSTER_REST_URL + "/configurations?(type=test-site&tag=TAG)"),
+        eq("GET"), (String)isNull(), (Map<String, String>) anyObject())).andAnswer(new
IAnswer<InputStream>() {
+      @Override
+      public InputStream answer() throws Throwable {
+        testConfigPolls[0] += 1;
+        return new ByteArrayInputStream(configurationString.getBytes());
+      }
+    }).anyTimes();
+
+    replay(urlStreamProvider);
+
+    RemoteCluster cluster = new RemoteCluster(AMBARI_CLUSTER_REST_URL, urlStreamProvider);
+    PassiveExpiringMap<String, JSONObject> cache = new PassiveExpiringMap<String, JSONObject>(10000L);
+    cluster.configurationCache = cache;
+
+    String value = cluster.getConfigurationValue("test-site", "test.property.name");
+    assertEquals(value, "test property value");
+    assertEquals(desiredConfigPolls[0], 1);
+    assertEquals(testConfigPolls[0], 1);
+
+    value = cluster.getConfigurationValue("test-site", "test.property.name");
+    assertEquals(value, "test property value");
+    assertEquals(desiredConfigPolls[0], 1);  // cache hit
+    assertEquals(testConfigPolls[0], 1);
+
+    cache.clear();
+    value = cluster.getConfigurationValue("test-site", "test.property.name");
+    assertEquals(value, "test property value");
+    assertEquals(desiredConfigPolls[0], 2);
+    assertEquals(testConfigPolls[0], 2);
+  }
+}
\ No newline at end of file
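
The cache assertions above depend on PassiveExpiringMap evicting entries lazily, on access, once the time-to-live has elapsed; there is no background eviction thread. A tiny standalone illustration of that semantics (the 50 ms TTL is illustrative; the test uses 10000 ms):

    import org.apache.commons.collections4.map.PassiveExpiringMap;

    public class ExpiringCacheSketch {
      public static void main(String[] args) throws InterruptedException {
        PassiveExpiringMap<String, String> cache = new PassiveExpiringMap<String, String>(50L);
        cache.put("tag", "TAG");
        System.out.println(cache.get("tag")); // "TAG" -- still within the TTL
        Thread.sleep(100L);
        System.out.println(cache.get("tag")); // null -- expired, removed on this access
      }
    }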

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuthTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuthTest.java
b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuthTest.java
new file mode 100644
index 0000000..27e98f2
--- /dev/null
+++ b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuthTest.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+import org.apache.ambari.view.URLStreamProvider;
+import org.easymock.EasyMock;
+import org.easymock.IArgumentMatcher;
+
+import org.junit.Test;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+
+public class URLStreamProviderBasicAuthTest {
+
+  @Test
+  public void testReadFrom() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readFrom(anyString(), anyString(), anyString(), HeadersMatcher.mapContainsAuthHeader())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readFrom("http://example.com", "GET",
+        (String) null, null);
+    urlStreamProviderBasicAuth.readFrom("http://example.com", "GET",
+        (String)null, new HashMap<String, String>());
+  }
+
+  @Test
+  public void testReadFrom1() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readFrom(anyString(), anyString(), (InputStream)anyObject(),
+        HeadersMatcher.mapContainsAuthHeader())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readFrom("http://example.com", "GET",
+        (InputStream) null, null);
+    urlStreamProviderBasicAuth.readFrom("http://example.com", "GET",
+        (InputStream)null, new HashMap<String, String>());
+  }
+
+  @Test
+  public void testReadAs() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readAs(anyString(), anyString(), anyString(),
+        HeadersMatcher.mapContainsAuthHeader(), anyString())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readAs("http://example.com", "GET",
+        (String) null, null, "admin");
+    urlStreamProviderBasicAuth.readAs("http://example.com", "GET",
+        (String) null, new HashMap<String, String>(), "admin");
+  }
+
+  @Test
+  public void testReadAs1() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readAs(anyString(), anyString(), (InputStream) anyObject(),
+        HeadersMatcher.mapContainsAuthHeader(), anyString())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readAs("http://example.com", "GET",
+        (InputStream) null, null, "admin");
+    urlStreamProviderBasicAuth.readAs("http://example.com", "GET",
+        (InputStream) null, new HashMap<String, String>(), "admin");
+  }
+
+  @Test
+  public void testReadAsCurrent() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readAsCurrent(anyString(), anyString(), anyString(),
+        HeadersMatcher.mapContainsAuthHeader())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readAsCurrent("http://example.com", "GET",
+        (String) null, null);
+    urlStreamProviderBasicAuth.readAsCurrent("http://example.com", "GET",
+        (String) null, new HashMap<String, String>());
+  }
+
+  @Test
+  public void testReadAsCurrent1() throws Exception {
+    URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class);
+    expect(urlStreamProvider.readAsCurrent(anyString(), anyString(), (InputStream) anyObject(),
+        HeadersMatcher.mapContainsAuthHeader())).andReturn(null);
+    URLStreamProviderBasicAuth urlStreamProviderBasicAuth =
+        new URLStreamProviderBasicAuth(urlStreamProvider, "user", "pass");
+
+    replay(urlStreamProvider);
+
+    urlStreamProviderBasicAuth.readAsCurrent("http://example.com", "GET",
+        (InputStream) null, null);
+    urlStreamProviderBasicAuth.readAsCurrent("http://example.com", "GET",
+        (InputStream)null, new HashMap<String, String>());
+  }
+
+
+  public static class HeadersMatcher implements IArgumentMatcher {
+
+    public static Map<String, String> mapContainsAuthHeader() {
+      EasyMock.reportMatcher(new HeadersMatcher());
+      return null;
+    }
+
+    public void appendTo(StringBuffer buffer) {
+      buffer.append("Authentication header matcher");
+    }
+
+    public boolean matches(Object headers) {
+      if (!(headers instanceof Map)) {
+        return false;
+      }
+
+      Map<String, String> headersMap = (Map<String, String>) headers;
+
+      if (!headersMap.containsKey("Authorization"))
+        return false;
+      String authHeader = headersMap.get("Authorization");
+
+      if (!authHeader.startsWith("Basic "))
+        return false;
+
+      return true;
+    }
+  }
+}
\ No newline at end of file
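
HeadersMatcher only checks that an Authorization header starting with "Basic " was attached; it never decodes the credentials. The value the wrapper presumably attaches is the standard RFC 2617 form, sketched below as an assumption (URLStreamProviderBasicAuth itself is not shown in this message):

    import org.apache.commons.codec.binary.Base64;

    public class BasicAuthHeaderSketch {
      // Assumed construction of the header the matcher looks for; the real
      // URLStreamProviderBasicAuth may differ in detail.
      static String basicAuthHeader(String user, String pass) {
        return "Basic " + Base64.encodeBase64String((user + ":" + pass).getBytes());
      }
    }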

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilderTest.java
b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilderTest.java
new file mode 100644
index 0000000..17a9c7f
--- /dev/null
+++ b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilderTest.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.junit.Test;
+
+import java.net.URI;
+
+import static org.junit.Assert.*;
+
+public class ConfigurationBuilderTest {
+  @Test
+  public void testAddProtocolMissing() throws Exception {
+    String normalized = ConfigurationBuilder.addProtocolIfMissing("namenode.example.com:50070");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+
+  @Test
+  public void testAddProtocolPresent() throws Exception {
+    String normalized = ConfigurationBuilder.addProtocolIfMissing("webhdfs://namenode.example.com");
+    assertEquals(normalized, "webhdfs://namenode.example.com");
+  }
+
+  @Test
+  public void testAddPortMissing() throws Exception {
+    String normalized = ConfigurationBuilder.addPortIfMissing("webhdfs://namenode.example.com");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+
+  @Test
+  public void testAddPortPresent() throws Exception {
+    String normalized = ConfigurationBuilder.addPortIfMissing("webhdfs://namenode.example.com:50070");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+}
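
ConfigurationBuilder itself lands in the second message of this commit; inferred from the assertions above, the two helpers plausibly behave like this sketch (an assumption, not the committed implementation):

    public class UrlNormalizationSketch {
      static String addProtocolIfMissing(String url) {
        // Assumed default scheme; the tests expect webhdfs:// to be prepended.
        return url.contains("://") ? url : "webhdfs://" + url;
      }

      static String addPortIfMissing(String url) {
        // 50070 is the default NameNode HTTP/WebHDFS port the tests assert.
        return url.matches(".*:\\d+$") ? url : url + ":50070";
      }
    }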

