falcon-commits mailing list archives

From rag...@apache.org
Subject git commit: FALCON-701 HadoopUtil and Util classes documented. Contributed by Paul Isaychuk
Date Thu, 02 Oct 2014 19:39:12 GMT
Repository: incubator-falcon
Updated Branches:
  refs/heads/master 0db23f42f -> 4e614ff1f


FALCON-701 HadoopUtil and Util classes documented. Contributed by Paul Isaychuk


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/4e614ff1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/4e614ff1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/4e614ff1

Branch: refs/heads/master
Commit: 4e614ff1f8e60f0db092d9e08c4769c84cab04b1
Parents: 0db23f4
Author: Raghav Kumar Gautam <raghav@apache.org>
Authored: Thu Oct 2 12:38:48 2014 -0700
Committer: Raghav Kumar Gautam <raghav@apache.org>
Committed: Thu Oct 2 12:38:48 2014 -0700

----------------------------------------------------------------------
 falcon-regression/CHANGES.txt                   |   3 +
 .../falcon/regression/core/util/HadoopUtil.java | 200 ++++++++++++++-----
 .../falcon/regression/core/util/Util.java       | 171 ++++++++++++++--
 3 files changed, 313 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4e614ff1/falcon-regression/CHANGES.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/CHANGES.txt b/falcon-regression/CHANGES.txt
index 573e8f6..4746950 100644
--- a/falcon-regression/CHANGES.txt
+++ b/falcon-regression/CHANGES.txt
@@ -12,6 +12,9 @@ Trunk (Unreleased)
    via Samarth Gupta)
 
   IMPROVEMENTS
+   FALCON-701 HadoopUtil and Util classes documented (Paul Isaychuk via
+   Raghav Kumar Gautam)
+
    FALCON-750 Method name fixed (Paul Isaychuk via Ruslan Ostafiychuk)
 
    FALCON-645 add test to falcon regression for change made in default behaviour 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4e614ff1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
index af5c01a..024a652 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
@@ -44,14 +44,20 @@ public final class HadoopUtil {
     private HadoopUtil() {
         throw new AssertionError("Instantiating utility class...");
     }
-    public static List<String> getAllFilesHDFS(FileSystem fs, Path location) throws IOException {
 
+    /**
+     * Retrieves all file names contained in a given directory.
+     * @param fs filesystem
+     * @param location given directory
+     * @return list of file names
+     * @throws IOException
+     */
+    public static List<String> getAllFilesHDFS(FileSystem fs, Path location) throws IOException {
         List<String> files = new ArrayList<String>();
         if (!fs.exists(location)) {
             return files;
         }
         FileStatus[] stats = fs.listStatus(location);
-
         for (FileStatus stat : stats) {
             if (!isDir(stat)) {
                 files.add(stat.getPath().toString());
@@ -60,13 +66,18 @@ public final class HadoopUtil {
         return files;
     }
 
+    /**
+     * Retrieves all directories within a given depth starting from a specific dir.
+     * @param fs filesystem
+     * @param location given dir
+     * @param depth depth
+     * @return all matching directories
+     * @throws IOException
+     */
     public static List<Path> getAllDirsRecursivelyHDFS(
         FileSystem fs, Path location, int depth) throws IOException {
-
         List<Path> returnList = new ArrayList<Path>();
-
         FileStatus[] stats = fs.listStatus(location);
-
         for (FileStatus stat : stats) {
             if (isDir(stat)) {
                 returnList.add(stat.getPath());
@@ -74,17 +85,20 @@ public final class HadoopUtil {
                     returnList.addAll(getAllDirsRecursivelyHDFS(fs, stat.getPath(), depth - 1));
                 }
             }
-
         }
-
         return returnList;
     }
 
+    /**
+     * Recursively retrieves all data file names from a given location.
+     * @param fs filesystem
+     * @param location given location
+     * @return list of all files
+     * @throws IOException
+     */
     public static List<Path> getAllFilesRecursivelyHDFS(
         FileSystem fs, Path location) throws IOException {
-
         List<Path> returnList = new ArrayList<Path>();
-
         FileStatus[] stats;
         try {
             stats = fs.listStatus(location);
@@ -92,12 +106,10 @@ public final class HadoopUtil {
             e.printStackTrace();
             return new ArrayList<Path>();
         }
-
         if (stats == null) {
             return returnList;
         }
         for (FileStatus stat : stats) {
-
             if (!isDir(stat)) {
                 if (!stat.getPath().toUri().toString().contains("_SUCCESS")) {
                     returnList.add(stat.getPath());
@@ -106,9 +118,7 @@ public final class HadoopUtil {
                 returnList.addAll(getAllFilesRecursivelyHDFS(fs, stat.getPath()));
             }
         }
-
         return returnList;
-
     }
 
     @SuppressWarnings("deprecation")
@@ -116,6 +126,13 @@ public final class HadoopUtil {
         return stat.isDir();
     }
 
+    /**
+     * Copies a file from a local location to an hdfs location.
+     * @param fs target filesystem
+     * @param dstHdfsDir destination
+     * @param srcFileLocation source location
+     * @throws IOException
+     */
     public static void copyDataToFolder(final FileSystem fs, final String dstHdfsDir,
                                         final String srcFileLocation)
         throws IOException {
@@ -124,6 +141,13 @@ public final class HadoopUtil {
         fs.copyFromLocalFile(new Path(srcFileLocation), new Path(dstHdfsDir));
     }
 
+    /**
+     * Copies a whole directory to hdfs.
+     * @param fs target filesystem
+     * @param dstHdfsDir destination dir
+     * @param localLocation source location
+     * @throws IOException
+     */
     public static void uploadDir(final FileSystem fs, final String dstHdfsDir,
                                  final String localLocation)
         throws IOException {
@@ -133,76 +157,96 @@ public final class HadoopUtil {
         HadoopUtil.copyDataToFolder(fs, dstHdfsDir, localLocation);
     }
 
+    /**
+     * Lists names of given directory subfolders.
+     * @param fs filesystem
+     * @param baseDir given directory
+     * @return list of subfolders
+     * @throws IOException
+     */
     public static List<String> getHDFSSubFoldersName(FileSystem fs,
                                                      String baseDir) throws IOException {
-
         List<String> returnList = new ArrayList<String>();
-
         FileStatus[] stats = fs.listStatus(new Path(baseDir));
-
-
         for (FileStatus stat : stats) {
             if (isDir(stat)) {
                 returnList.add(stat.getPath().getName());
             }
-
         }
-
-
         return returnList;
     }
 
+    /**
+     * Checks if file is present in given directory.
+     * @param fs filesystem
+     * @param hdfsPath path to a given directory
+     * @param fileToCheckFor file
+     * @return whether the file is present or not
+     * @throws IOException
+     */
     public static boolean isFilePresentHDFS(FileSystem fs, String hdfsPath, String fileToCheckFor)
         throws IOException {
-
         LOGGER.info("getting file from folder: " + hdfsPath);
-
         List<String> fileNames = getAllFileNamesFromHDFS(fs, hdfsPath);
-
         for (String filePath : fileNames) {
-
             if (filePath.contains(fileToCheckFor)) {
                 return true;
             }
         }
-
         return false;
     }
 
+    /**
+     * Lists all file names for a given directory.
+     * @param fs filesystem
+     * @param hdfsPath path to a given directory
+     * @return list of files which given directory contains
+     * @throws IOException
+     */
     private static List<String> getAllFileNamesFromHDFS(
         FileSystem fs, String hdfsPath) throws IOException {
-
         List<String> returnList = new ArrayList<String>();
-
         LOGGER.info("getting file from folder: " + hdfsPath);
         FileStatus[] stats = fs.listStatus(new Path(hdfsPath));
-
         for (FileStatus stat : stats) {
             String currentPath = stat.getPath().toUri().getPath(); // gives directory name
             if (!isDir(stat)) {
                 returnList.add(currentPath);
             }
-
-
         }
         return returnList;
     }
 
+    /**
+     * Removes a directory with a given name and creates an empty one with the same name.
+     * @param fs filesystem
+     * @param path path to a directory
+     * @throws IOException
+     */
     public static void recreateDir(FileSystem fs, String path) throws IOException {
-
         deleteDirIfExists(path, fs);
         LOGGER.info("creating hdfs dir: " + path + " on " + fs.getConf().get("fs.default.name"));
         fs.mkdirs(new Path(path));
-
     }
 
+    /**
+     * Recreates dirs for a list of filesystems.
+     * @param fileSystems list of filesystems
+     * @param path path to a directory
+     * @throws IOException
+     */
     public static void recreateDir(List<FileSystem> fileSystems, String path) throws IOException {
-
         for (FileSystem fs : fileSystems) {
             recreateDir(fs, path);
         }
     }
 
+    /**
+     * Removes given directory from a filesystem.
+     * @param hdfsPath path to a given directory
+     * @param fs filesystem
+     * @throws IOException
+     */
     public static void deleteDirIfExists(String hdfsPath, FileSystem fs) throws IOException {
         Path path = new Path(hdfsPath);
         if (fs.exists(path)) {
@@ -214,11 +258,27 @@ public final class HadoopUtil {
         }
     }
 
+    /**
+     * Copies data into folders without a path prefix.
+     * @param fs filesystem
+     * @param inputPath source location
+     * @param remoteLocations destination location
+     * @throws IOException
+     */
     public static void flattenAndPutDataInFolder(FileSystem fs, String inputPath,
                                                  List<String> remoteLocations) throws IOException {
         flattenAndPutDataInFolder(fs, inputPath, "", remoteLocations);
     }
 
+    /**
+     * Copies files from a source directory to target directories on hdfs.
+     * @param fs target filesystem
+     * @param inputPath source location
+     * @param remotePathPrefix prefix for target directories
+     * @param remoteLocations target directories
+     * @return list of exact locations where data was copied
+     * @throws IOException
+     */
     public static List<String> flattenAndPutDataInFolder(FileSystem fs, String inputPath,
                                                  String remotePathPrefix,
                                                  List<String> remoteLocations) throws IOException {
@@ -236,7 +296,6 @@ public final class HadoopUtil {
                 filePaths.add(filePath);
             }
         }
-
         if (!remotePathPrefix.endsWith("/") && !remoteLocations.get(0).startsWith("/")) {
             remotePathPrefix += "/";
         }
@@ -253,14 +312,20 @@ public final class HadoopUtil {
             if (!fs.exists(new Path(remoteLocation))) {
                 fs.mkdirs(new Path(remoteLocation));
             }
-
             fs.copyFromLocalFile(false, true, filePaths.toArray(new Path[filePaths.size()]),
                 new Path(remoteLocation));
         }
         return locations;
     }
 
-
+    /**
+     * Copies data from local sources to remote directories.
+     * @param fs target filesystem
+     * @param folderPrefix prefix for remote directories
+     * @param folderList remote directories
+     * @param fileLocations sources
+     * @throws IOException
+     */
     public static void copyDataToFolders(FileSystem fs, final String folderPrefix,
         List<String> folderList, String... fileLocations) throws IOException {
         for (final String folder : folderList) {
@@ -273,7 +338,6 @@ public final class HadoopUtil {
                     LOGGER.info("file could not be created");
                 }
             }
-
             FileWriter fr = new FileWriter(f);
             fr.append("folder");
             fr.close();
@@ -282,7 +346,6 @@ public final class HadoopUtil {
             if (!r) {
                 LOGGER.info("delete was not successful");
             }
-
             Path[] srcPaths = new Path[fileLocations.length];
             for (int i = 0; i < srcPaths.length; ++i) {
                 srcPaths[i] = new Path(fileLocations[i]);
@@ -293,6 +356,14 @@ public final class HadoopUtil {
         }
     }
 
+    /**
+     * Uploads data to remote directories with names within date ranges.
+     * @param fs target filesystem
+     * @param interval date ranges before and after current date
+     * @param minuteSkip time to skip within a range to get intermediate directories
+     * @param folderPrefix prefix for remote directories
+     * @throws IOException
+     */
     public static void lateDataReplenish(FileSystem fs, int interval,
         int minuteSkip, String folderPrefix) throws IOException {
         List<String> folderData = TimeUtil.getMinuteDatesOnEitherSide(interval, minuteSkip);
@@ -300,6 +371,13 @@ public final class HadoopUtil {
         flattenAndPutDataInFolder(fs, OSUtil.NORMAL_INPUT, folderPrefix, folderData);
     }
 
+    /**
+     * Creates list of folders on remote filesystem.
+     * @param fs remote filesystem
+     * @param folderPrefix prefix for remote directories
+     * @param folderList list of folders
+     * @throws IOException
+     */
     public static void createFolders(FileSystem fs, final String folderPrefix,
                                              List<String> folderList) throws IOException {
         for (final String folder : folderList) {
@@ -307,6 +385,13 @@ public final class HadoopUtil {
         }
     }
 
+    /**
+     * Creates folders in a remote location according to the current time and copies files there.
+     * @param fs target filesystem
+     * @param remoteLocation remote location
+     * @param localLocation source
+     * @throws IOException
+     */
     public static void injectMoreData(FileSystem fs, final String remoteLocation,
                                       String localLocation) throws IOException {
         File[] files = new File(localLocation).listFiles();
@@ -321,49 +406,70 @@ public final class HadoopUtil {
 
     }
 
+    /**
+     * Uploads either _SUCCESS or log_01.txt file to remote directories with names within date
+     * ranges.
+     * @param fs target filesystem
+     * @param interval date ranges before and after current date
+     * @param minuteSkip time to skip within a range to get intermediate directories
+     * @param folderPrefix prefix for remote directories
+     * @param fileToBePut what file to copy to remote locations
+     * @throws IOException
+     */
     public static void putFileInFolderHDFS(FileSystem fs, int interval, int minuteSkip,
                                            String folderPrefix, String fileToBePut)
         throws IOException {
         List<String> folderPaths = TimeUtil.getMinuteDatesOnEitherSide(interval, minuteSkip);
         LOGGER.info("folderData: " + folderPaths.toString());
-
         createFolders(fs, folderPrefix, folderPaths);
-
         if (fileToBePut.equals("_SUCCESS")) {
             copyDataToFolders(fs, folderPrefix, folderPaths, OSUtil.NORMAL_INPUT + "_SUCCESS");
         } else {
             copyDataToFolders(fs, folderPrefix, folderPaths, OSUtil.NORMAL_INPUT + "log_01.txt");
         }
-
     }
 
+    /**
+     * Uploads log_01.txt file to remote directories with names within date ranges.
+     * @param fs target filesystem
+     * @param interval date ranges before and after current date
+     * @param minuteSkip time to skip within a range to get intermediate directories
+     * @param folderPrefix prefix for remote directories
+     * @param postFix postfix for remote locations
+     * @throws IOException
+     */
     public static void lateDataReplenishWithoutSuccess(FileSystem fs, int interval,
         int minuteSkip, String folderPrefix, String postFix) throws IOException {
         List<String> folderPaths = TimeUtil.getMinuteDatesOnEitherSide(interval, minuteSkip);
         LOGGER.info("folderData: " + folderPaths.toString());
-
         if (postFix != null) {
             for (int i = 0; i < folderPaths.size(); i++) {
                 folderPaths.set(i, folderPaths.get(i) + postFix);
             }
         }
-
         createFolders(fs, folderPrefix, folderPaths);
-        copyDataToFolders(fs, folderPrefix, folderPaths,
-                OSUtil.NORMAL_INPUT + "log_01.txt");
+        copyDataToFolders(fs, folderPrefix, folderPaths, OSUtil.NORMAL_INPUT + "log_01.txt");
     }
 
+    /**
+     * Uploads both log_01.txt and _SUCCESS files to remote directories with names within date
+     * ranges.
+     * @param fs target filesystem
+     * @param interval date ranges before and after current date
+     * @param minuteSkip time to skip within a range to get intermediate directories
+     * @param folderPrefix prefix for remote directories
+     * @param postFix postfix for remote locations
+     * @throws IOException
+     */
     public static void lateDataReplenish(FileSystem fs, int interval, int minuteSkip,
                                          String folderPrefix, String postFix) throws IOException {
         List<String> folderPaths = TimeUtil.getMinuteDatesOnEitherSide(interval, minuteSkip);
         LOGGER.info("folderData: " + folderPaths.toString());
-
         if (postFix != null) {
             for (int i = 0; i < folderPaths.size(); i++) {
                 folderPaths.set(i, folderPaths.get(i) + postFix);
             }
         }
-
         createFolders(fs, folderPrefix, folderPaths);
         copyDataToFolders(fs, folderPrefix, folderPaths,
             OSUtil.NORMAL_INPUT + "_SUCCESS", OSUtil.NORMAL_INPUT + "log_01.txt");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4e614ff1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/Util.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/Util.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/Util.java
index 8fdf65a..30eaf61 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/Util.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/Util.java
@@ -90,18 +90,34 @@ public final class Util {
     }
     private static final Logger LOGGER = Logger.getLogger(Util.class);
 
+    /**
+     * Sends request without data and user.
+     */
     public static ServiceResponse sendRequest(String url, String method)
         throws IOException, URISyntaxException, AuthenticationException {
         return sendRequest(url, method, null, null);
     }
 
+    /**
+     * Sends api request without data.
+     */
     public static ServiceResponse sendRequest(String url, String method, String user)
         throws IOException, URISyntaxException, AuthenticationException {
         return sendRequest(url, method, null, user);
     }
 
-    public static ServiceResponse sendRequest(String url, String method, String data,
-                                              String user)
+    /**
+     * Sends api requests.
+     * @param url target url
+     * @param method request method
+     * @param data data to be placed in the body of the request
+     * @param user user to be used to send request
+     * @return api response
+     * @throws IOException
+     * @throws URISyntaxException
+     * @throws AuthenticationException
+     */
+    public static ServiceResponse sendRequest(String url, String method, String data, String user)
         throws IOException, URISyntaxException, AuthenticationException {
         BaseRequest request = new BaseRequest(url, method, user, data);
         request.addHeader(RequestKeys.CONTENT_TYPE_HEADER, RequestKeys.XML_CONTENT_TYPE);
@@ -109,15 +125,29 @@ public final class Util {
         return new ServiceResponse(response);
     }
 
+    /**
+     * @param data process definition
+     * @return process name
+     */
     public static String getProcessName(String data) {
         ProcessMerlin processElement = new ProcessMerlin(data);
         return processElement.getName();
     }
 
+    /**
+     * @param data string data
+     * @return whether the data should be considered as XML
+     */
     private static boolean isXML(String data) {
         return data != null && data.trim().length() > 0 && data.trim().startsWith("<");
     }
 
+    /**
+     * Converts service response to api result form.
+     * @param response service response
+     * @return api result
+     * @throws JAXBException
+     */
     public static APIResult parseResponse(ServiceResponse response) throws JAXBException {
         if (!isXML(response.getMessage())) {
             return new APIResult(APIResult.Status.FAILED, response.getMessage(), "somerandomstring",
@@ -144,6 +174,14 @@ public final class Util {
         return temp;
     }
 
+    /**
+     * Lists all directories contained in a store by sub-path.
+     * @param helper cluster where store is present
+     * @param subPath sub-path
+     * @return list of all directories in the sub-path
+     * @throws IOException
+     * @throws JSchException
+     */
     public static List<String> getStoreInfo(IEntityManagerHelper helper, String subPath)
         throws IOException, JSchException {
         if (helper.getStoreLocation().startsWith("hdfs:")) {
@@ -156,6 +194,10 @@ public final class Util {
         }
     }
 
+    /**
+     * @param data entity definition
+     * @return entity name
+     */
     public static String readEntityName(String data) {
         if (data.contains("uri:falcon:feed")) {
             return new FeedMerlin(data).getName();
@@ -166,17 +208,27 @@ public final class Util {
         }
     }
 
+    /**
+     * @return unique string
+     */
     public static String getUniqueString() {
         return "-" + UUID.randomUUID().toString().split("-")[0];
     }
 
+    /**
+     * Retrieves all hadoop data directories from a specific data path.
+     * @param fs filesystem
+     * @param feed feed definition
+     * @param dir specific directory
+     * @return list of all data directories
+     * @throws IOException
+     */
     public static List<String> getHadoopDataFromDir(FileSystem fs, String feed, String dir)
         throws IOException {
         List<String> finalResult = new ArrayList<String>();
         String feedPath = getFeedPath(feed);
         int depth = feedPath.split(dir)[1].split("/").length - 1;
-        List<Path> results = HadoopUtil.getAllDirsRecursivelyHDFS(fs,
-            new Path(dir), depth);
+        List<Path> results = HadoopUtil.getAllDirsRecursivelyHDFS(fs, new Path(dir), depth);
         for (Path result : results) {
             int pathDepth = result.toString().split(dir)[1].split("/").length - 1;
             if (pathDepth == depth) {
@@ -186,6 +238,13 @@ public final class Util {
         return finalResult;
     }
 
+    /**
+     * Sets custom feed property.
+     * @param feed feed definition
+     * @param propertyName custom property name
+     * @param propertyValue custom property value
+     * @return updated feed
+     */
     public static String setFeedProperty(String feed, String propertyName, String propertyValue) {
         FeedMerlin feedObject = new FeedMerlin(feed);
         boolean found = false;
@@ -206,6 +265,10 @@ public final class Util {
         return feedObject.toString();
     }
 
+    /**
+     * @param feed feed definition
+     * @return feed data path
+     */
     public static String getFeedPath(String feed) {
         FeedMerlin feedObject = new FeedMerlin(feed);
         for (Location location : feedObject.getLocations().getLocations()) {
@@ -213,16 +276,27 @@ public final class Util {
                 return location.getPath();
             }
         }
-
         return null;
     }
 
+    /**
+     * Sets cut-off period.
+     * @param feed feed definition
+     * @param frequency cut-off period
+     * @return updated feed
+     */
     public static String insertLateFeedValue(String feed, Frequency frequency) {
         FeedMerlin feedObject = new FeedMerlin(feed);
         feedObject.getLateArrival().setCutOff(frequency);
         return feedObject.toString();
     }
 
+    /**
+     * Sets data location for a feed.
+     * @param feed feed definition
+     * @param pathValue new path
+     * @return updated feed
+     */
     public static String setFeedPathValue(String feed, String pathValue) {
         FeedMerlin feedObject = new FeedMerlin(feed);
         for (Location location : feedObject.getLocations().getLocations()) {
@@ -233,6 +307,13 @@ public final class Util {
         return feedObject.toString();
     }
 
+    /**
+     * Finds first folder within a date range.
+     * @param startTime start date
+     * @param endTime end date
+     * @param folderList list of folders which are under analysis
+     * @return first matching folder or null if not present in a list
+     */
     public static String findFolderBetweenGivenTimeStamps(DateTime startTime, DateTime endTime,
                                                           List<String> folderList) {
         DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy/MM/dd/HH/mm");
@@ -246,12 +327,24 @@ public final class Util {
         return null;
     }
 
+    /**
+     * @param feedString feed definition
+     * @param newName new name
+     * @return feed with updated name
+     */
     public static String setFeedName(String feedString, String newName) {
         FeedMerlin feedObject = new FeedMerlin(feedString);
         feedObject.setName(newName);
         return feedObject.toString().trim();
     }
 
+    /**
+     * Sets the name of the cluster at a given index.
+     * @param feedString feed which contains a cluster
+     * @param clusterName new cluster name
+     * @param clusterIndex index of cluster which should be updated
+     * @return feed with cluster name updated
+     */
     public static String setClusterNameInFeed(String feedString, String clusterName,
                                               int clusterIndex) {
         FeedMerlin feedObject = new FeedMerlin(feedString);
@@ -259,6 +352,10 @@ public final class Util {
         return feedObject.toString().trim();
     }
 
+    /**
+     * @param clusterXML cluster definition
+     * @return cluster definition converted to object representation
+     */
     public static ClusterMerlin getClusterObject(String clusterXML) {
         return new ClusterMerlin(clusterXML);
     }
@@ -276,7 +373,6 @@ public final class Util {
         List<String> finalList = new ArrayList<String>();
         for (String line : raw) {
             finalList.add(line.split(",")[0]);
-
         }
         return finalList;
     }
@@ -299,6 +395,12 @@ public final class Util {
         return finalList;
     }
 
+    /**
+     * Shuts down falcon server on a given host using sudo credentials.
+     * @param helper given host
+     * @throws IOException
+     * @throws JSchException
+     */
     public static void shutDownService(IEntityManagerHelper helper)
         throws IOException, JSchException {
         ExecUtil.runRemoteScriptAsSudo(helper.getQaHost(), helper.getUsername(),
@@ -307,6 +409,14 @@ public final class Util {
         TimeUtil.sleepSeconds(10);
     }
 
+    /**
+     * Starts falcon server on a given host using sudo credentials and checks if it succeeds.
+     * @param helper given host
+     * @throws IOException
+     * @throws JSchException
+     * @throws AuthenticationException
+     * @throws URISyntaxException
+     */
     public static void startService(IEntityManagerHelper helper)
         throws IOException, JSchException, AuthenticationException, URISyntaxException {
         ExecUtil.runRemoteScriptAsSudo(helper.getQaHost(), helper.getUsername(),
@@ -327,18 +437,34 @@ public final class Util {
         throw new RuntimeException("Service on" + helper.getHostname() + " did not start!");
     }
 
+    /**
+     * Stops and starts falcon service for a given host using sudo credentials.
+     * @param helper given host
+     * @throws IOException
+     * @throws JSchException
+     * @throws AuthenticationException
+     * @throws URISyntaxException
+     */
     public static void restartService(IEntityManagerHelper helper)
         throws IOException, JSchException, AuthenticationException, URISyntaxException {
         LOGGER.info("restarting service for: " + helper.getQaHost());
-
         shutDownService(helper);
         startService(helper);
     }
 
+    /**
+     * @param processData process definition
+     * @return process definition converted to object representation.
+     */
     public static Process getProcessObject(String processData) {
         return new ProcessMerlin(processData);
     }
 
+    /**
+     * Prints JMSConsumer messages content.
+     * @param messageConsumer the source JMSConsumer
+     * @throws JMSException
+     */
     public static void printMessageData(JmsMessageConsumer messageConsumer) throws JMSException {
         LOGGER.info("dumping all queue data:");
         for (MapMessage mapMessage : messageConsumer.getReceivedMessages()) {
@@ -353,6 +479,12 @@ public final class Util {
         }
     }
 
+    /**
+     * Configures cluster definition according to provided properties.
+     * @param cluster cluster which should be configured
+     * @param prefix current cluster prefix
+     * @return modified cluster definition
+     */
     public static String getEnvClusterXML(String cluster, String prefix) {
         ClusterMerlin clusterObject = getClusterObject(cluster);
         if ((null == prefix) || prefix.isEmpty()) {
@@ -410,6 +542,12 @@ public final class Util {
         return clusterObject.toString();
     }
 
+    /**
+     * Forms property object based on parameters.
+     * @param name property name
+     * @param value property value
+     * @return property object
+     */
     public static org.apache.falcon.entity.v0.cluster.Property
     getFalconClusterPropertyObject(String name, String value) {
         org.apache.falcon.entity.v0.cluster.Property property = new org
@@ -419,6 +557,11 @@ public final class Util {
         return property;
     }
 
+    /**
+     * Gets entity type according to its definition.
+     * @param entity entity which is under analysis
+     * @return entity type
+     */
     public static EntityType getEntityType(String entity) {
         if (entity.contains("uri:falcon:process:0.1")) {
             return EntityType.PROCESS;
@@ -431,7 +574,7 @@ public final class Util {
     }
 
     /**
-     * Compares two definitions
+     * Compares two definitions.
      * @param server1 server where 1st definition is stored
      * @param server2 server where 2nd definition is stored
      * @param entity entity which is under analysis
@@ -480,7 +623,7 @@ public final class Util {
     }
 
     /**
-     * @param pathString whole path
+     * @param pathString whole path.
      * @return path to basic data folder
      */
     public static String getPathPrefix(String pathString) {
@@ -488,7 +631,7 @@ public final class Util {
     }
 
     /**
-     * @param path whole path
+     * @param path whole path.
      * @return file name which is retrieved from a path
      */
     public static String getFileNameFromPath(String path) {
@@ -496,7 +639,7 @@ public final class Util {
     }
 
     /**
-     * Defines request type according to request url
+     * Defines request type according to request url.
      * @param url request url
      * @return request type
      */
@@ -527,7 +670,7 @@ public final class Util {
     }
 
     /**
-     * Prints xml in readable form
+     * Prints xml in readable form.
      * @param xmlString xmlString
      * @return formatted xmlString
      */
@@ -553,7 +696,7 @@ public final class Util {
     }
 
     /**
-     * Converts json string to readable form
+     * Converts json string to readable form.
      * @param jsonString json string
      * @return formatted string
      */
@@ -567,7 +710,7 @@ public final class Util {
     }
 
     /**
-     * Prints xml or json in pretty and readable format
+     * Prints xml or json in pretty and readable format.
      * @param str xml or json string
      * @return converted xml or json
      */
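
Similarly, a minimal sketch of the request helpers documented in Util. The endpoint URL and HTTP method string are made up, and the import paths for ServiceResponse and APIResult are assumed here rather than taken from this diff:

    import org.apache.falcon.regression.core.response.ServiceResponse;
    import org.apache.falcon.regression.core.util.Util;
    import org.apache.falcon.resource.APIResult;

    public class UtilUsageSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical Falcon endpoint; regression tests normally build this via their helpers.
            String listUrl = "http://falcon-host:15000/api/entities/list/process";

            // Overload documented above: sends the request without a body and without a user.
            ServiceResponse response = Util.sendRequest(listUrl, "get");

            // parseResponse() converts the raw response into an APIResult; per the code above,
            // a non-XML message is mapped to a FAILED result.
            APIResult result = Util.parseResponse(response);
            System.out.println(result.getStatus() + ": " + result.getMessage());
        }
    }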

