ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From alexantone...@apache.org
Subject [02/12] ambari git commit: AMBARI-9676. Initial Hive View Submission to Contrib (alexantonenko)
Date Tue, 24 Feb 2015 16:31:55 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/codemirror.css
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/codemirror.css b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/codemirror.css
new file mode 100644
index 0000000..68c67b1
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/codemirror.css
@@ -0,0 +1,309 @@
+/* BASICS */
+
+.CodeMirror {
+  /* Set height, width, borders, and global font properties here */
+  font-family: monospace;
+  height: 300px;
+}
+.CodeMirror-scroll {
+  /* Set scrolling behaviour here */
+  overflow: auto;
+}
+
+/* PADDING */
+
+.CodeMirror-lines {
+  padding: 4px 0; /* Vertical padding around content */
+}
+.CodeMirror pre {
+  padding: 0 4px; /* Horizontal padding of content */
+}
+
+.CodeMirror-scrollbar-filler, .CodeMirror-gutter-filler {
+  background-color: white; /* The little square between H and V scrollbars */
+}
+
+/* GUTTER */
+
+.CodeMirror-gutters {
+  border-right: 1px solid #ddd;
+  background-color: #f7f7f7;
+  white-space: nowrap;
+}
+.CodeMirror-linenumbers {}
+.CodeMirror-linenumber {
+  padding: 0 3px 0 5px;
+  min-width: 20px;
+  text-align: right;
+  color: #999;
+  -moz-box-sizing: content-box;
+  box-sizing: content-box;
+}
+
+.CodeMirror-guttermarker { color: black; }
+.CodeMirror-guttermarker-subtle { color: #999; }
+
+/* CURSOR */
+
+.CodeMirror div.CodeMirror-cursor {
+  border-left: 1px solid black;
+}
+/* Shown when moving in bi-directional text */
+.CodeMirror div.CodeMirror-secondarycursor {
+  border-left: 1px solid silver;
+}
+.CodeMirror.cm-keymap-fat-cursor div.CodeMirror-cursor {
+  width: auto;
+  border: 0;
+  background: #7e7;
+}
+.CodeMirror.cm-keymap-fat-cursor div.CodeMirror-cursors {
+  z-index: 1;
+}
+
+.cm-animate-fat-cursor {
+  width: auto;
+  border: 0;
+  -webkit-animation: blink 1.06s steps(1) infinite;
+  -moz-animation: blink 1.06s steps(1) infinite;
+  animation: blink 1.06s steps(1) infinite;
+}
+@-moz-keyframes blink {
+  0% { background: #7e7; }
+  50% { background: none; }
+  100% { background: #7e7; }
+}
+@-webkit-keyframes blink {
+  0% { background: #7e7; }
+  50% { background: none; }
+  100% { background: #7e7; }
+}
+@keyframes blink {
+  0% { background: #7e7; }
+  50% { background: none; }
+  100% { background: #7e7; }
+}
+
+/* Can style cursor differently in overwrite (non-insert) mode */
+div.CodeMirror-overwrite div.CodeMirror-cursor {}
+
+.cm-tab { display: inline-block; text-decoration: inherit; }
+
+.CodeMirror-ruler {
+  border-left: 1px solid #ccc;
+  position: absolute;
+}
+
+/* DEFAULT THEME */
+
+.cm-s-default .cm-keyword {color: #708;}
+.cm-s-default .cm-atom {color: #219;}
+.cm-s-default .cm-number {color: #164;}
+.cm-s-default .cm-def {color: #00f;}
+.cm-s-default .cm-variable,
+.cm-s-default .cm-punctuation,
+.cm-s-default .cm-property,
+.cm-s-default .cm-operator {}
+.cm-s-default .cm-variable-2 {color: #05a;}
+.cm-s-default .cm-variable-3 {color: #085;}
+.cm-s-default .cm-comment {color: #a50;}
+.cm-s-default .cm-string {color: #a11;}
+.cm-s-default .cm-string-2 {color: #f50;}
+.cm-s-default .cm-meta {color: #555;}
+.cm-s-default .cm-qualifier {color: #555;}
+.cm-s-default .cm-builtin {color: #30a;}
+.cm-s-default .cm-bracket {color: #997;}
+.cm-s-default .cm-tag {color: #170;}
+.cm-s-default .cm-attribute {color: #00c;}
+.cm-s-default .cm-header {color: blue;}
+.cm-s-default .cm-quote {color: #090;}
+.cm-s-default .cm-hr {color: #999;}
+.cm-s-default .cm-link {color: #00c;}
+
+.cm-negative {color: #d44;}
+.cm-positive {color: #292;}
+.cm-header, .cm-strong {font-weight: bold;}
+.cm-em {font-style: italic;}
+.cm-link {text-decoration: underline;}
+
+.cm-s-default .cm-error {color: #f00;}
+.cm-invalidchar {color: #f00;}
+
+/* Default styles for common addons */
+
+div.CodeMirror span.CodeMirror-matchingbracket {color: #0f0;}
+div.CodeMirror span.CodeMirror-nonmatchingbracket {color: #f22;}
+.CodeMirror-matchingtag { background: rgba(255, 150, 0, .3); }
+.CodeMirror-activeline-background {background: #e8f2ff;}
+
+/* STOP */
+
+/* The rest of this file contains styles related to the mechanics of
+   the editor. You probably shouldn't touch them. */
+
+.CodeMirror {
+  line-height: 1;
+  position: relative;
+  overflow: hidden;
+  background: white;
+  color: black;
+}
+
+.CodeMirror-scroll {
+  /* 30px is the magic margin used to hide the element's real scrollbars */
+  /* See overflow: hidden in .CodeMirror */
+  margin-bottom: -30px; margin-right: -30px;
+  padding-bottom: 30px;
+  height: 100%;
+  outline: none; /* Prevent dragging from highlighting the element */
+  position: relative;
+  -moz-box-sizing: content-box;
+  box-sizing: content-box;
+}
+.CodeMirror-sizer {
+  position: relative;
+  border-right: 30px solid transparent;
+  -moz-box-sizing: content-box;
+  box-sizing: content-box;
+}
+
+/* The fake, visible scrollbars. Used to force redraw during scrolling
+   before actual scrolling happens, thus preventing shaking and
+   flickering artifacts. */
+.CodeMirror-vscrollbar, .CodeMirror-hscrollbar, .CodeMirror-scrollbar-filler, .CodeMirror-gutter-filler {
+  position: absolute;
+  z-index: 6;
+  display: none;
+}
+.CodeMirror-vscrollbar {
+  right: 0; top: 0;
+  overflow-x: hidden;
+  overflow-y: scroll;
+}
+.CodeMirror-hscrollbar {
+  bottom: 0; left: 0;
+  overflow-y: hidden;
+  overflow-x: scroll;
+}
+.CodeMirror-scrollbar-filler {
+  right: 0; bottom: 0;
+}
+.CodeMirror-gutter-filler {
+  left: 0; bottom: 0;
+}
+
+.CodeMirror-gutters {
+  position: absolute; left: 0; top: 0;
+  padding-bottom: 30px;
+  z-index: 3;
+}
+.CodeMirror-gutter {
+  white-space: normal;
+  height: 100%;
+  -moz-box-sizing: content-box;
+  box-sizing: content-box;
+  padding-bottom: 30px;
+  margin-bottom: -32px;
+  display: inline-block;
+  /* Hack to make IE7 behave */
+  *zoom:1;
+  *display:inline;
+}
+.CodeMirror-gutter-elt {
+  position: absolute;
+  cursor: default;
+  z-index: 4;
+}
+
+.CodeMirror-lines {
+  cursor: text;
+  min-height: 1px; /* prevents collapsing before first draw */
+}
+.CodeMirror pre {
+  /* Reset some styles that the rest of the page might have set */
+  -moz-border-radius: 0; -webkit-border-radius: 0; border-radius: 0;
+  border-width: 0;
+  background: transparent;
+  font-family: inherit;
+  font-size: inherit;
+  margin: 0;
+  white-space: pre;
+  word-wrap: normal;
+  line-height: inherit;
+  color: inherit;
+  z-index: 2;
+  position: relative;
+  overflow: visible;
+}
+.CodeMirror-wrap pre {
+  word-wrap: break-word;
+  white-space: pre-wrap;
+  word-break: normal;
+}
+
+.CodeMirror-linebackground {
+  position: absolute;
+  left: 0; right: 0; top: 0; bottom: 0;
+  z-index: 0;
+}
+
+.CodeMirror-linewidget {
+  position: relative;
+  z-index: 2;
+  overflow: auto;
+}
+
+.CodeMirror-widget {}
+
+.CodeMirror-wrap .CodeMirror-scroll {
+  overflow-x: hidden;
+}
+
+.CodeMirror-measure {
+  position: absolute;
+  width: 100%;
+  height: 0;
+  overflow: hidden;
+  visibility: hidden;
+}
+.CodeMirror-measure pre { position: static; }
+
+.CodeMirror div.CodeMirror-cursor {
+  position: absolute;
+  border-right: none;
+  width: 0;
+}
+
+div.CodeMirror-cursors {
+  visibility: hidden;
+  position: relative;
+  z-index: 3;
+}
+.CodeMirror-focused div.CodeMirror-cursors {
+  visibility: visible;
+}
+
+.CodeMirror-selected { background: #d9d9d9; }
+.CodeMirror-focused .CodeMirror-selected { background: #d7d4f0; }
+.CodeMirror-crosshair { cursor: crosshair; }
+
+.cm-searching {
+  background: #ffa;
+  background: rgba(255, 255, 0, .4);
+}
+
+/* IE7 hack to prevent it from returning funny offsetTops on the spans */
+.CodeMirror span { *vertical-align: text-bottom; }
+
+/* Used to force a border model for a node */
+.cm-force-border { padding-right: .1px; }
+
+@media print {
+  /* Hide the cursor when printing */
+  .CodeMirror div.CodeMirror-cursors {
+    visibility: hidden;
+  }
+}
+
+/* Help users use markselection to safely style text background */
+span.CodeMirror-selectedtext { background: none; }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.css
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.css b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.css
new file mode 100644
index 0000000..924e638
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.css
@@ -0,0 +1,38 @@
+.CodeMirror-hints {
+  position: absolute;
+  z-index: 10;
+  overflow: hidden;
+  list-style: none;
+
+  margin: 0;
+  padding: 2px;
+
+  -webkit-box-shadow: 2px 3px 5px rgba(0,0,0,.2);
+  -moz-box-shadow: 2px 3px 5px rgba(0,0,0,.2);
+  box-shadow: 2px 3px 5px rgba(0,0,0,.2);
+  border-radius: 3px;
+  border: 1px solid silver;
+
+  background: white;
+  font-size: 90%;
+  font-family: monospace;
+
+  max-height: 20em;
+  overflow-y: auto;
+}
+
+.CodeMirror-hint {
+  margin: 0;
+  padding: 0 4px;
+  border-radius: 2px;
+  max-width: 19em;
+  overflow: hidden;
+  white-space: pre;
+  color: black;
+  cursor: pointer;
+}
+
+li.CodeMirror-hint-active {
+  background: #08f;
+  color: white;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.js b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.js
new file mode 100644
index 0000000..27b770b
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/show-hint.js
@@ -0,0 +1,389 @@
+// CodeMirror, copyright (c) by Marijn Haverbeke and others
+// Distributed under an MIT license: http://codemirror.net/LICENSE
+
+(function(mod) {
+  if (typeof exports == "object" && typeof module == "object") // CommonJS
+    mod(require("../../lib/codemirror"));
+  else if (typeof define == "function" && define.amd) // AMD
+    define(["../../lib/codemirror"], mod);
+  else // Plain browser env
+    mod(CodeMirror);
+})(function(CodeMirror) {
+  "use strict";
+
+  var HINT_ELEMENT_CLASS        = "CodeMirror-hint";
+  var ACTIVE_HINT_ELEMENT_CLASS = "CodeMirror-hint-active";
+
+  // This is the old interface, kept around for now to stay
+  // backwards-compatible.
+  CodeMirror.showHint = function(cm, getHints, options) {
+    if (!getHints) return cm.showHint(options);
+    if (options && options.async) getHints.async = true;
+    var newOpts = {hint: getHints};
+    if (options) for (var prop in options) newOpts[prop] = options[prop];
+    return cm.showHint(newOpts);
+  };
+
+  CodeMirror.defineExtension("showHint", function(options) {
+    // We want a single cursor position.
+    if (this.listSelections().length > 1 || this.somethingSelected()) return;
+
+    if (this.state.completionActive) this.state.completionActive.close();
+    var completion = this.state.completionActive = new Completion(this, options);
+    var getHints = completion.options.hint;
+    if (!getHints) return;
+
+    CodeMirror.signal(this, "startCompletion", this);
+    if (getHints.async)
+      getHints(this, function(hints) { completion.showHints(hints); }, completion.options);
+    else
+      return completion.showHints(getHints(this, completion.options));
+  });
+
+  function Completion(cm, options) {
+    this.cm = cm;
+    this.options = this.buildOptions(options);
+    this.widget = this.onClose = null;
+  }
+
+  Completion.prototype = {
+    close: function() {
+      if (!this.active()) return;
+      this.cm.state.completionActive = null;
+
+      if (this.widget) this.widget.close();
+      if (this.onClose) this.onClose();
+      CodeMirror.signal(this.cm, "endCompletion", this.cm);
+    },
+
+    active: function() {
+      return this.cm.state.completionActive == this;
+    },
+
+    pick: function(data, i) {
+      var completion = data.list[i];
+      if (completion.hint) completion.hint(this.cm, data, completion);
+      else this.cm.replaceRange(getText(completion), completion.from || data.from,
+                                completion.to || data.to, "complete");
+      CodeMirror.signal(data, "pick", completion);
+      this.close();
+    },
+
+    showHints: function(data) {
+      if (!data || !data.list.length || !this.active()) return this.close();
+
+      if (this.options.completeSingle && data.list.length == 1)
+        this.pick(data, 0);
+      else
+        this.showWidget(data);
+    },
+
+    showWidget: function(data) {
+      this.widget = new Widget(this, data);
+      CodeMirror.signal(data, "shown");
+
+      var debounce = 0, completion = this, finished;
+      var closeOn = this.options.closeCharacters;
+      var startPos = this.cm.getCursor(), startLen = this.cm.getLine(startPos.line).length;
+
+      var requestAnimationFrame = window.requestAnimationFrame || function(fn) {
+        return setTimeout(fn, 1000/60);
+      };
+      var cancelAnimationFrame = window.cancelAnimationFrame || clearTimeout;
+
+      function done() {
+        if (finished) return;
+        finished = true;
+        completion.close();
+        completion.cm.off("cursorActivity", activity);
+        if (data) CodeMirror.signal(data, "close");
+      }
+
+      function update() {
+        if (finished) return;
+        CodeMirror.signal(data, "update");
+        var getHints = completion.options.hint;
+        if (getHints.async)
+          getHints(completion.cm, finishUpdate, completion.options);
+        else
+          finishUpdate(getHints(completion.cm, completion.options));
+      }
+      function finishUpdate(data_) {
+        data = data_;
+        if (finished) return;
+        if (!data || !data.list.length) return done();
+        if (completion.widget) completion.widget.close();
+        completion.widget = new Widget(completion, data);
+      }
+
+      function clearDebounce() {
+        if (debounce) {
+          cancelAnimationFrame(debounce);
+          debounce = 0;
+        }
+      }
+
+      function activity() {
+        clearDebounce();
+        var pos = completion.cm.getCursor(), line = completion.cm.getLine(pos.line);
+        if (pos.line != startPos.line || line.length - pos.ch != startLen - startPos.ch ||
+            pos.ch < startPos.ch || completion.cm.somethingSelected() ||
+            (pos.ch && closeOn.test(line.charAt(pos.ch - 1)))) {
+          completion.close();
+        } else {
+          debounce = requestAnimationFrame(update);
+          if (completion.widget) completion.widget.close();
+        }
+      }
+      this.cm.on("cursorActivity", activity);
+      this.onClose = done;
+    },
+
+    buildOptions: function(options) {
+      var editor = this.cm.options.hintOptions;
+      var out = {};
+      for (var prop in defaultOptions) out[prop] = defaultOptions[prop];
+      if (editor) for (var prop in editor)
+        if (editor[prop] !== undefined) out[prop] = editor[prop];
+      if (options) for (var prop in options)
+        if (options[prop] !== undefined) out[prop] = options[prop];
+      return out;
+    }
+  };
+
+  function getText(completion) {
+    if (typeof completion == "string") return completion;
+    else return completion.text;
+  }
+
+  function buildKeyMap(completion, handle) {
+    var baseMap = {
+      Up: function() {handle.moveFocus(-1);},
+      Down: function() {handle.moveFocus(1);},
+      PageUp: function() {handle.moveFocus(-handle.menuSize() + 1, true);},
+      PageDown: function() {handle.moveFocus(handle.menuSize() - 1, true);},
+      Home: function() {handle.setFocus(0);},
+      End: function() {handle.setFocus(handle.length - 1);},
+      Enter: handle.pick,
+      Tab: handle.pick,
+      Esc: handle.close
+    };
+    var custom = completion.options.customKeys;
+    var ourMap = custom ? {} : baseMap;
+    function addBinding(key, val) {
+      var bound;
+      if (typeof val != "string")
+        bound = function(cm) { return val(cm, handle); };
+      // This mechanism is deprecated
+      else if (baseMap.hasOwnProperty(val))
+        bound = baseMap[val];
+      else
+        bound = val;
+      ourMap[key] = bound;
+    }
+    if (custom)
+      for (var key in custom) if (custom.hasOwnProperty(key))
+        addBinding(key, custom[key]);
+    var extra = completion.options.extraKeys;
+    if (extra)
+      for (var key in extra) if (extra.hasOwnProperty(key))
+        addBinding(key, extra[key]);
+    return ourMap;
+  }
+
+  function getHintElement(hintsElement, el) {
+    while (el && el != hintsElement) {
+      if (el.nodeName.toUpperCase() === "LI" && el.parentNode == hintsElement) return el;
+      el = el.parentNode;
+    }
+  }
+
+  function Widget(completion, data) {
+    this.completion = completion;
+    this.data = data;
+    var widget = this, cm = completion.cm;
+
+    var hints = this.hints = document.createElement("ul");
+    hints.className = "CodeMirror-hints";
+    this.selectedHint = data.selectedHint || 0;
+
+    var completions = data.list;
+    for (var i = 0; i < completions.length; ++i) {
+      var elt = hints.appendChild(document.createElement("li")), cur = completions[i];
+      var className = HINT_ELEMENT_CLASS + (i != this.selectedHint ? "" : " " + ACTIVE_HINT_ELEMENT_CLASS);
+      if (cur.className != null) className = cur.className + " " + className;
+      elt.className = className;
+      if (cur.render) cur.render(elt, data, cur);
+      else elt.appendChild(document.createTextNode(cur.displayText || getText(cur)));
+      elt.hintId = i;
+    }
+
+    var pos = cm.cursorCoords(completion.options.alignWithWord ? data.from : null);
+    var left = pos.left, top = pos.bottom, below = true;
+    hints.style.left = left + "px";
+    hints.style.top = top + "px";
+    // If we're at the edge of the screen, then we want the menu to appear on the left of the cursor.
+    var winW = window.innerWidth || Math.max(document.body.offsetWidth, document.documentElement.offsetWidth);
+    var winH = window.innerHeight || Math.max(document.body.offsetHeight, document.documentElement.offsetHeight);
+    (completion.options.container || document.body).appendChild(hints);
+    var box = hints.getBoundingClientRect(), overlapY = box.bottom - winH;
+    if (overlapY > 0) {
+      var height = box.bottom - box.top, curTop = pos.top - (pos.bottom - box.top);
+      if (curTop - height > 0) { // Fits above cursor
+        hints.style.top = (top = pos.top - height) + "px";
+        below = false;
+      } else if (height > winH) {
+        hints.style.height = (winH - 5) + "px";
+        hints.style.top = (top = pos.bottom - box.top) + "px";
+        var cursor = cm.getCursor();
+        if (data.from.ch != cursor.ch) {
+          pos = cm.cursorCoords(cursor);
+          hints.style.left = (left = pos.left) + "px";
+          box = hints.getBoundingClientRect();
+        }
+      }
+    }
+    var overlapX = box.left - winW;
+    if (overlapX > 0) {
+      if (box.right - box.left > winW) {
+        hints.style.width = (winW - 5) + "px";
+        overlapX -= (box.right - box.left) - winW;
+      }
+      hints.style.left = (left = pos.left - overlapX) + "px";
+    }
+
+    cm.addKeyMap(this.keyMap = buildKeyMap(completion, {
+      moveFocus: function(n, avoidWrap) { widget.changeActive(widget.selectedHint + n, avoidWrap); },
+      setFocus: function(n) { widget.changeActive(n); },
+      menuSize: function() { return widget.screenAmount(); },
+      length: completions.length,
+      close: function() { completion.close(); },
+      pick: function() { widget.pick(); },
+      data: data
+    }));
+
+    if (completion.options.closeOnUnfocus) {
+      var closingOnBlur;
+      cm.on("blur", this.onBlur = function() { closingOnBlur = setTimeout(function() { completion.close(); }, 100); });
+      cm.on("focus", this.onFocus = function() { clearTimeout(closingOnBlur); });
+    }
+
+    var startScroll = cm.getScrollInfo();
+    cm.on("scroll", this.onScroll = function() {
+      var curScroll = cm.getScrollInfo(), editor = cm.getWrapperElement().getBoundingClientRect();
+      var newTop = top + startScroll.top - curScroll.top;
+      var point = newTop - (window.pageYOffset || (document.documentElement || document.body).scrollTop);
+      if (!below) point += hints.offsetHeight;
+      if (point <= editor.top || point >= editor.bottom) return completion.close();
+      hints.style.top = newTop + "px";
+      hints.style.left = (left + startScroll.left - curScroll.left) + "px";
+    });
+
+    CodeMirror.on(hints, "dblclick", function(e) {
+      var t = getHintElement(hints, e.target || e.srcElement);
+      if (t && t.hintId != null) {widget.changeActive(t.hintId); widget.pick();}
+    });
+
+    CodeMirror.on(hints, "click", function(e) {
+      var t = getHintElement(hints, e.target || e.srcElement);
+      if (t && t.hintId != null) {
+        widget.changeActive(t.hintId);
+        if (completion.options.completeOnSingleClick) widget.pick();
+      }
+    });
+
+    CodeMirror.on(hints, "mousedown", function() {
+      setTimeout(function(){cm.focus();}, 20);
+    });
+
+    CodeMirror.signal(data, "select", completions[0], hints.firstChild);
+    return true;
+  }
+
+  Widget.prototype = {
+    close: function() {
+      if (this.completion.widget != this) return;
+      this.completion.widget = null;
+      this.hints.parentNode.removeChild(this.hints);
+      this.completion.cm.removeKeyMap(this.keyMap);
+
+      var cm = this.completion.cm;
+      if (this.completion.options.closeOnUnfocus) {
+        cm.off("blur", this.onBlur);
+        cm.off("focus", this.onFocus);
+      }
+      cm.off("scroll", this.onScroll);
+    },
+
+    pick: function() {
+      this.completion.pick(this.data, this.selectedHint);
+    },
+
+    changeActive: function(i, avoidWrap) {
+      if (i >= this.data.list.length)
+        i = avoidWrap ? this.data.list.length - 1 : 0;
+      else if (i < 0)
+        i = avoidWrap ? 0  : this.data.list.length - 1;
+      if (this.selectedHint == i) return;
+      var node = this.hints.childNodes[this.selectedHint];
+      node.className = node.className.replace(" " + ACTIVE_HINT_ELEMENT_CLASS, "");
+      node = this.hints.childNodes[this.selectedHint = i];
+      node.className += " " + ACTIVE_HINT_ELEMENT_CLASS;
+      if (node.offsetTop < this.hints.scrollTop)
+        this.hints.scrollTop = node.offsetTop - 3;
+      else if (node.offsetTop + node.offsetHeight > this.hints.scrollTop + this.hints.clientHeight)
+        this.hints.scrollTop = node.offsetTop + node.offsetHeight - this.hints.clientHeight + 3;
+      CodeMirror.signal(this.data, "select", this.data.list[this.selectedHint], node);
+    },
+
+    screenAmount: function() {
+      return Math.floor(this.hints.clientHeight / this.hints.firstChild.offsetHeight) || 1;
+    }
+  };
+
+  CodeMirror.registerHelper("hint", "auto", function(cm, options) {
+    var helpers = cm.getHelpers(cm.getCursor(), "hint"), words;
+    if (helpers.length) {
+      for (var i = 0; i < helpers.length; i++) {
+        var cur = helpers[i](cm, options);
+        if (cur && cur.list.length) return cur;
+      }
+    } else if (words = cm.getHelper(cm.getCursor(), "hintWords")) {
+      if (words) return CodeMirror.hint.fromList(cm, {words: words});
+    } else if (CodeMirror.hint.anyword) {
+      return CodeMirror.hint.anyword(cm, options);
+    }
+  });
+
+  CodeMirror.registerHelper("hint", "fromList", function(cm, options) {
+    var cur = cm.getCursor(), token = cm.getTokenAt(cur);
+    var found = [];
+    for (var i = 0; i < options.words.length; i++) {
+      var word = options.words[i];
+      if (word.slice(0, token.string.length) == token.string)
+        found.push(word);
+    }
+
+    if (found.length) return {
+      list: found,
+      from: CodeMirror.Pos(cur.line, token.start),
+            to: CodeMirror.Pos(cur.line, token.end)
+    };
+  });
+
+  CodeMirror.commands.autocomplete = CodeMirror.showHint;
+
+  var defaultOptions = {
+    hint: CodeMirror.hint.auto,
+    completeSingle: true,
+    alignWithWord: true,
+    closeCharacters: /[\s()\[\]{};:>,]/,
+    closeOnUnfocus: true,
+    completeOnSingleClick: false,
+    container: null,
+    customKeys: null,
+    extraKeys: null
+  };
+
+  CodeMirror.defineOption("hintOptions", null);
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/sql-hint.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/sql-hint.js b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/sql-hint.js
new file mode 100644
index 0000000..522f9e8
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/sql-hint.js
@@ -0,0 +1,192 @@
+// CodeMirror, copyright (c) by Marijn Haverbeke and others
+// Distributed under an MIT license: http://codemirror.net/LICENSE
+
+(function(mod) {
+  if (typeof exports == "object" && typeof module == "object") // CommonJS
+    mod(require("../../lib/codemirror"), require("../../mode/sql/sql"));
+  else if (typeof define == "function" && define.amd) // AMD
+    define(["../../lib/codemirror", "../../mode/sql/sql"], mod);
+  else // Plain browser env
+    mod(CodeMirror);
+})(function(CodeMirror) {
+  "use strict";
+
+  var tables;
+  var defaultTable;
+  var keywords;
+  var CONS = {
+    QUERY_DIV: ";",
+    ALIAS_KEYWORD: "AS"
+  };
+  var Pos = CodeMirror.Pos;
+
+  function getKeywords(editor) {
+    var mode = editor.doc.modeOption;
+    if (mode === "sql") mode = "text/x-sql";
+    return CodeMirror.resolveMode(mode).keywords;
+  }
+
+  function match(string, word) {
+    var len = string.length;
+    var sub = word.substr(0, len);
+    return string.toUpperCase() === sub.toUpperCase();
+  }
+
+  function addMatches(result, search, wordlist, formatter) {
+    for (var word in wordlist) {
+      if (!wordlist.hasOwnProperty(word)) continue;
+      if (Array.isArray(wordlist)) {
+        word = wordlist[word];
+      }
+      if (match(search, word)) {
+        result.push(formatter(word));
+      }
+    }
+  }
+
+  function nameCompletion(result, editor) {
+    var cur = editor.getCursor();
+    var token = editor.getTokenAt(cur);
+    var useBacktick = (token.string.charAt(0) == "`");
+    var string = token.string.substr(1);
+    var prevToken = editor.getTokenAt(Pos(cur.line, token.start));
+    if (token.string.charAt(0) == "." || prevToken.string == "."){
+      //Suggest column names
+      prevToken = prevToken.string == "." ? editor.getTokenAt(Pos(cur.line, token.start - 1)) : prevToken;
+      var table = prevToken.string;
+      //Check if backtick is used in table name. If yes, use it for columns too.
+      var useBacktickTable = false;
+      if (table.match(/`/g)) {
+        useBacktickTable = true;
+        table = table.replace(/`/g, "");
+      }
+      //Check if table is available. If not, find table by Alias
+      if (!tables.hasOwnProperty(table))
+        table = findTableByAlias(table, editor);
+      var columns = tables[table];
+      if (!columns) return;
+
+      if (useBacktick) {
+        addMatches(result, string, columns, function(w) {return "`" + w + "`";});
+      }
+      else if(useBacktickTable) {
+        addMatches(result, string, columns, function(w) {return ".`" + w + "`";});
+      }
+      else {
+        addMatches(result, string, columns, function(w) {return "." + w;});
+      }
+    }
+    else {
+      //Suggest table names or columns in defaultTable
+      while (token.start && string.charAt(0) == ".") {
+        token = editor.getTokenAt(Pos(cur.line, token.start - 1));
+        string = token.string + string;
+      }
+      if (useBacktick) {
+        addMatches(result, string, tables, function(w) {return "`" + w + "`";});
+        addMatches(result, string, defaultTable, function(w) {return "`" + w + "`";});
+      }
+      else {
+        addMatches(result, string, tables, function(w) {return w;});
+        addMatches(result, string, defaultTable, function(w) {return w;});
+      }
+    }
+  }
+
+  function eachWord(lineText, f) {
+    if (!lineText) return;
+    var excepted = /[,;]/g;
+    var words = lineText.split(" ");
+    for (var i = 0; i < words.length; i++) {
+      f(words[i]?words[i].replace(excepted, '') : '');
+    }
+  }
+
+  function convertCurToNumber(cur) {
+    // max characters of a line is 999,999.
+    return cur.line + cur.ch / Math.pow(10, 6);
+  }
+
+  function convertNumberToCur(num) {
+    return Pos(Math.floor(num), +num.toString().split('.').pop());
+  }
+
+  function findTableByAlias(alias, editor) {
+    var doc = editor.doc;
+    var fullQuery = doc.getValue();
+    var aliasUpperCase = alias.toUpperCase();
+    var previousWord = "";
+    var table = "";
+    var separator = [];
+    var validRange = {
+      start: Pos(0, 0),
+      end: Pos(editor.lastLine(), editor.getLineHandle(editor.lastLine()).length)
+    };
+
+    //add separator
+    var indexOfSeparator = fullQuery.indexOf(CONS.QUERY_DIV);
+    while(indexOfSeparator != -1) {
+      separator.push(doc.posFromIndex(indexOfSeparator));
+      indexOfSeparator = fullQuery.indexOf(CONS.QUERY_DIV, indexOfSeparator+1);
+    }
+    separator.unshift(Pos(0, 0));
+    separator.push(Pos(editor.lastLine(), editor.getLineHandle(editor.lastLine()).text.length));
+
+    //find valid range
+    var prevItem = 0;
+    var current = convertCurToNumber(editor.getCursor());
+    for (var i=0; i< separator.length; i++) {
+      var _v = convertCurToNumber(separator[i]);
+      if (current > prevItem && current <= _v) {
+        validRange = { start: convertNumberToCur(prevItem), end: convertNumberToCur(_v) };
+        break;
+      }
+      prevItem = _v;
+    }
+
+    var query = doc.getRange(validRange.start, validRange.end, false);
+
+    for (var i = 0; i < query.length; i++) {
+      var lineText = query[i];
+      eachWord(lineText, function(word) {
+        var wordUpperCase = word.toUpperCase();
+        if (wordUpperCase === aliasUpperCase && tables.hasOwnProperty(previousWord)) {
+            table = previousWord;
+        }
+        if (wordUpperCase !== CONS.ALIAS_KEYWORD) {
+          previousWord = word;
+        }
+      });
+      if (table) break;
+    }
+    return table;
+  }
+
+  CodeMirror.registerHelper("hint", "sql", function(editor, options) {
+    tables = (options && options.tables) || {};
+    var defaultTableName = options && options.defaultTable;
+    defaultTable = (defaultTableName && tables[defaultTableName] || []);
+    keywords = keywords || getKeywords(editor);
+
+    var cur = editor.getCursor();
+    var result = [];
+    var token = editor.getTokenAt(cur), start, end, search;
+    if (token.string.match(/^[.`\w@]\w*$/)) {
+      search = token.string;
+      start = token.start;
+      end = token.end;
+    } else {
+      start = end = cur.ch;
+      search = "";
+    }
+    if (search.charAt(0) == "." || search.charAt(0) == "`") {
+      nameCompletion(result, editor);
+    } else {
+      addMatches(result, search, tables, function(w) {return w;});
+      addMatches(result, search, defaultTable, function(w) {return w;});
+      addMatches(result, search, keywords, function(w) {return w.toUpperCase();});
+    }
+
+    return {list: result, from: Pos(cur.line, start), to: Pos(cur.line, end)};
+  });
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml
new file mode 100644
index 0000000..b936c6d
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/view.xml
@@ -0,0 +1,160 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<view>
+    <name>HIVE</name>
+    <label>Hive</label>
+    <version>0.0.1</version>
+
+    <!-- HDFS Configs -->
+    <parameter>
+        <name>webhdfs.url</name>
+        <description>WebHDFS FileSystem URI (example: webhdfs://namenode:50070)</description>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.username</name>
+        <description>Username to use as the proxy user (doAs) when accessing HDFS</description>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.auth</name>
+        <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
+        <required>false</required>
+    </parameter>
+
+    <!-- General Configs -->
+
+    <parameter>
+        <name>dataworker.username</name>
+        <description>The username (defaults to ViewContext username)</description>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>scripts.dir</name>
+        <description>HDFS directory path to store Hive scripts (example: /users/${username})</description>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>jobs.dir</name>
+        <description>HDFS directory path to store Hive job status (example: /users/${username})</description>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.host</name>
+        <description>HiveServer2 hostname or IP (example: 127.0.0.1)</description>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.port</name>
+        <description>HiveServer2 Thrift port (example: 10000)</description>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.auth</name>
+        <description>Semicolon-separated authentication configs. Default: auth=NOSASL</description>
+        <required>false</required>
+    </parameter>
+
+    <resource>
+        <name>savedQuery</name>
+        <plural-name>savedQueries</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQuery</resource-class>
+        <provider-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService</service-class>
+    </resource>
+
+    <resource>
+        <name>fileResource</name>
+        <plural-name>fileResources</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive.resources.resources.FileResourceItem</resource-class>
+        <provider-class>org.apache.ambari.view.hive.resources.resources.FileResourceResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive.resources.resources.FileResourceService</service-class>
+    </resource>
+
+    <resource>
+        <name>udf</name>
+        <plural-name>udfs</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive.resources.udfs.UDF</resource-class>
+        <provider-class>org.apache.ambari.view.hive.resources.udfs.UDFResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive.resources.udfs.UDFService</service-class>
+    </resource>
+
+    <resource>
+        <name>job</name>
+        <plural-name>jobs</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive.resources.jobs.JobImpl</resource-class>
+        <provider-class>org.apache.ambari.view.hive.resources.jobs.JobResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive.resources.jobs.JobService</service-class>
+    </resource>
+
+    <resource>
+        <name>file</name>
+        <service-class>org.apache.ambari.view.hive.resources.files.FileService</service-class>
+    </resource>
+
+    <resource>
+        <name>ddl</name>
+        <service-class>org.apache.ambari.view.hive.resources.browser.HiveBrowserService</service-class>
+    </resource>
+
+    <resource>
+        <name>hive</name>
+        <service-class>org.apache.ambari.view.hive.HelpService</service-class>
+    </resource>
+
+    <persistence>
+        <entity>
+            <class>org.apache.ambari.view.hive.persistence.DataStoreStorage$SmokeTestEntity</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.resources.jobs.JobImpl</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.resources.jobs.StoredOperationHandle</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.resources.savedQueries.SavedQuery</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.resources.udfs.UDF</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.resources.resources.FileResourceItem</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive.TestBean</class>
+            <id-property>id</id-property>
+        </entity>
+    </persistence>
+</view>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
new file mode 100644
index 0000000..f38d0e9
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.hadoop.fs.FileUtil;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+
+/**
+ * Base class for Hive view unit tests. Provides EasyMock-backed
+ * ViewResourceHandler and ViewContext instances, a per-test property map,
+ * and Guice wiring for service classes under test.
+ */
+public abstract class BaseHiveTest {
+  protected ViewResourceHandler handler;
+  protected ViewContext context;
+  protected static File hiveStorageFile;
+  protected static File baseDir;
+  protected Map<String, String> properties;
+
+  // Root directory for all on-disk test artifacts.
+  protected static String DATA_DIRECTORY = "./target/HiveTest";
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    // Assign the shared static field; the previous code declared a local
+    // variable here that shadowed it, leaving the field null until setUp().
+    baseDir = new File(DATA_DIRECTORY).getAbsoluteFile();
+    FileUtil.fullyDelete(baseDir);
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    handler = createNiceMock(ViewResourceHandler.class);
+    context = createNiceMock(ViewContext.class);
+
+    properties = new HashMap<String, String>();
+    baseDir = new File(DATA_DIRECTORY).getAbsoluteFile();
+    // Derive the storage file from the data directory instead of repeating
+    // the "./target/HiveTest" literal.
+    hiveStorageFile = new File(baseDir, "storage.dat").getAbsoluteFile();
+
+    properties.put("dataworker.storagePath", hiveStorageFile.toString());
+    properties.put("scripts.dir", "/tmp/.hiveQueries");
+    properties.put("jobs.dir", "/tmp/.hiveJobs");
+
+    setupProperties(properties, baseDir);
+
+    expect(context.getProperties()).andReturn(properties).anyTimes();
+    expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
+    expect(context.getInstanceName()).andReturn("MyHive").anyTimes();
+
+    replay(handler, context);
+  }
+
+  /** Hook for subclasses to contribute extra view instance properties. */
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+
+  }
+
+  @After
+  public void tearDown() throws Exception {
+
+  }
+
+  /**
+   * Creates an instance of the given service class with the supplied
+   * handler/context bound through a dedicated Guice injector.
+   */
+  protected static <T> T getService(Class<T> clazz,
+                                    final ViewResourceHandler viewResourceHandler,
+                                    final ViewContext viewInstanceContext) {
+    Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(ViewResourceHandler.class).toInstance(viewResourceHandler);
+        bind(ViewContext.class).toInstance(viewInstanceContext);
+      }
+    });
+    return viewInstanceInjector.getInstance(clazz);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java
new file mode 100644
index 0000000..0ee8eb3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.File;
+import java.util.Map;
+
+/**
+ * Base class for tests needing a live in-process HDFS (MiniDFSCluster).
+ * Starts the cluster once per class and exposes its URI via {@code hdfsURI}.
+ */
+public abstract class HDFSTest extends BaseHiveTest {
+  protected static MiniDFSCluster hdfsCluster;
+  protected static String hdfsURI;
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    BaseHiveTest.startUp(); // super
+    File hdfsDir = new File("./target/HiveTest/hdfs/")
+        .getAbsoluteFile();
+    FileUtil.fullyDelete(hdfsDir);
+
+    Configuration conf = new Configuration();
+    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
+    // Let the current OS user act as a proxy user from any host/group,
+    // matching the webhdfs.username property set in setupProperties().
+    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
+    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
+
+    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+    hdfsCluster = builder.build();
+    hdfsURI = hdfsCluster.getURI().toString();
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    BaseHiveTest.shutDown();
+    // Guard against startUp() having failed before the cluster was built;
+    // an unconditional shutdown() would NPE and mask the original failure.
+    if (hdfsCluster != null) {
+      hdfsCluster.shutdown();
+      hdfsCluster = null;
+    }
+  }
+
+  @Override
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+    super.setupProperties(properties, baseDir);
+    properties.put("webhdfs.url", hdfsURI);
+    properties.put("webhdfs.username", System.getProperty("user.name"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java
new file mode 100644
index 0000000..ac913a9
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import org.junit.Assert;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import java.net.URI;
+
+import static org.easymock.EasyMock.*;
+
+/**
+ * Shared helpers for service-level tests: HTTP status assertions plus
+ * pre-configured JAX-RS and servlet mocks.
+ */
+public class ServiceTestUtils {
+  /** Asserts the response carries 200 OK. */
+  public static void assertHTTPResponseOK(Response response) {
+    Assert.assertEquals(200, response.getStatus());
+  }
+
+  /** Asserts the response carries 201 Created. */
+  public static void assertHTTPResponseCreated(Response response) {
+    Assert.assertEquals(201, response.getStatus());
+  }
+
+  /** Asserts the response carries 204 No Content. */
+  public static void assertHTTPResponseNoContent(Response response) {
+    Assert.assertEquals(204, response.getStatus());
+  }
+
+  /** Records the EasyMock expectation that a "Location" header will be set. */
+  public static void expectLocationHeaderInResponse(HttpServletResponse response) {
+    response.setHeader(eq("Location"), anyString());
+  }
+
+  /** Builds a UriInfo mock whose absolute path is a fixed dummy URI. */
+  public static UriInfo getDefaultUriInfo() {
+    URI dummyUri = UriBuilder.fromUri("http://host/a/b").build();
+    UriInfo uriInfoMock = createNiceMock(UriInfo.class);
+    expect(uriInfoMock.getAbsolutePath()).andReturn(dummyUri);
+    replay(uriInfoMock);
+    return uriInfoMock;
+  }
+
+  /** Builds an HttpServletResponse mock that expects a Location header. */
+  public static HttpServletResponse getResponseWithLocation() {
+    HttpServletResponse responseMock = createNiceMock(HttpServletResponse.class);
+    expectLocationHeaderInResponse(responseMock);
+    replay(responseMock);
+    return responseMock;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java
new file mode 100644
index 0000000..ceb3677
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.backgroundjobs;
+
+import org.apache.ambari.view.hive.BaseHiveTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for BackgroundJobController: starting background jobs and
+ * interrupting them while they are running.
+ */
+public class BackgroundJobControllerTest extends BaseHiveTest {
+
+  // Maximum time (ms) to poll for a job thread to reach the expected state.
+  private static final long MAX_WAIT_TIME = 2000;
+
+  @Test
+  public void testStartJob() throws Exception {
+    BackgroundJobController backgroundJobController = new BackgroundJobController(context);
+
+    HangingRunnable runnable = new HangingRunnable();
+    backgroundJobController.startJob("key", runnable);
+
+    assertStateIs(backgroundJobController, "key", Thread.State.RUNNABLE);
+
+    runnable.goOn();
+    assertStateIs(backgroundJobController, "key", Thread.State.TERMINATED);
+  }
+
+  @Test
+  public void testInterrupt() throws Exception {
+    BackgroundJobController backgroundJobController = new BackgroundJobController(context);
+
+    HangingRunnable runnable = new HangingRunnable();
+    backgroundJobController.startJob("key", runnable);
+
+    assertStateIs(backgroundJobController, "key", Thread.State.RUNNABLE);
+
+    backgroundJobController.interrupt("key");
+    assertStateIs(backgroundJobController, "key", Thread.State.TERMINATED);
+  }
+
+  // Polls until the job thread reaches the expected state or MAX_WAIT_TIME
+  // elapses, then asserts on the final observed state.
+  private void assertStateIs(BackgroundJobController backgroundJobController, String key, Thread.State state) throws InterruptedException {
+    long start = System.currentTimeMillis();
+    while (backgroundJobController.state(key) != state) {
+      Thread.sleep(100);
+      if (System.currentTimeMillis() - start > MAX_WAIT_TIME)
+        break;
+    }
+    Assert.assertEquals(state, backgroundJobController.state(key));
+  }
+
+  // Runnable that spins until released via goOn() or interrupted.
+  private static class HangingRunnable implements Runnable {
+    // volatile is required: goOn() is called from the test thread while run()
+    // spins on the job thread; without it the write may never become visible
+    // to the spinning loop and the job could hang until interrupted.
+    private volatile boolean waitMe = true;
+
+    @Override
+    public void run() {
+      while (waitMe && !Thread.interrupted());
+    }
+
+    public void goOn() {
+      this.waitMe = false;
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
new file mode 100644
index 0000000..7ce2dd3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.files;
+
+import org.apache.ambari.view.hive.ServiceTestUtils;
+import org.apache.ambari.view.hive.HDFSTest;
+import org.apache.ambari.view.hive.utils.*;
+import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.json.simple.JSONObject;
+import org.junit.*;
+import org.junit.rules.ExpectedException;
+
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+
+/**
+ * Service-level tests for FileService backed by an in-process MiniDFSCluster.
+ * Pagination tests rely on a deliberately tiny page size (4 bytes).
+ */
+public class FileServiceTest extends HDFSTest {
+  private final static int PAGINATOR_PAGE_SIZE = 4;  //4 bytes
+  private FileService fileService;
+
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    fileService = getService(FileService.class, handler, context);
+    FilePaginator.setPageSize(PAGINATOR_PAGE_SIZE);
+  }
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    HDFSTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    HDFSTest.shutDown(); // super
+    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Test
+  public void testCreateFile() throws IOException, InterruptedException {
+    Response response = createFile("/tmp/testCreateFile", "testCreateFile content");
+
+    ServiceTestUtils.assertHTTPResponseNoContent(response);
+    assertHDFSFileContains("/tmp/testCreateFile", "testCreateFile content");
+  }
+
+  @Test
+  public void testCreateExistingFileForbidden() throws IOException, InterruptedException {
+    createFile("/tmp/testOverwriteFile", "original content");
+    thrown.expect(ServiceFormattedException.class);
+    createFile("/tmp/testOverwriteFile", "new content");
+  }
+
+  @Test
+  public void testCreateFilePathNotExists() throws IOException, InterruptedException {
+    Response response = createFile("/non/existent/path/Luke", null);
+    ServiceTestUtils.assertHTTPResponseNoContent(response);
+
+    Response response2 = createFile("/tmp/Leia", null);
+    ServiceTestUtils.assertHTTPResponseNoContent(response2);
+
+    // Creating the same file again must fail. The expected exception makes
+    // any statement after the call unreachable, so (unlike the original
+    // code) we do not assert on a response that can never be produced.
+    thrown.expect(ServiceFormattedException.class);
+    createFile("/tmp/Leia", null); // file already exists
+  }
+
+  @Test
+  public void testUpdateFileContent() throws Exception {
+    createFile("/tmp/testUpdateFileContent", "some content");
+
+    FileService.FileResourceRequest updateRequest = new FileService.FileResourceRequest();
+    updateRequest.file = new FileResource();
+    updateRequest.file.setFileContent("new content");
+
+    Response response = fileService.updateFile(updateRequest, "/tmp/testUpdateFileContent");
+
+    ServiceTestUtils.assertHTTPResponseNoContent(response);
+    assertHDFSFileContains("/tmp/testUpdateFileContent", "new content");
+  }
+
+  @Test
+  public void testPagination() throws Exception {
+    createFile("/tmp/testPagination", "1234567890");  // 10 bytes, 3 pages if 1 page is 4 bytes
+
+    Response response = fileService.getFilePage("/tmp/testPagination", 0L);
+    ServiceTestUtils.assertHTTPResponseOK(response);
+
+    JSONObject obj = ((JSONObject) response.getEntity());
+    assertFileJsonResponseSanity(obj);
+
+    FileResource firstPage = (FileResource) obj.get("file");
+    Assert.assertEquals("1234", firstPage.getFileContent());
+    Assert.assertEquals(3, firstPage.getPageCount());
+    Assert.assertEquals(0, firstPage.getPage());
+    Assert.assertTrue(firstPage.isHasNext());
+    Assert.assertEquals("/tmp/testPagination", firstPage.getFilePath());
+
+
+    response = fileService.getFilePage("/tmp/testPagination", 1L);
+    ServiceTestUtils.assertHTTPResponseOK(response);
+
+    FileResource secondPage = (FileResource) ((JSONObject) response.getEntity()).get("file");
+    Assert.assertEquals("5678", secondPage.getFileContent());
+    Assert.assertEquals(1, secondPage.getPage());
+    Assert.assertTrue(secondPage.isHasNext());
+
+
+    response = fileService.getFilePage("/tmp/testPagination", 2L);
+    ServiceTestUtils.assertHTTPResponseOK(response);
+
+    FileResource thirdPage = (FileResource) ((JSONObject) response.getEntity()).get("file");
+    Assert.assertEquals("90", thirdPage.getFileContent());
+    Assert.assertEquals(2, thirdPage.getPage());
+    Assert.assertFalse(thirdPage.isHasNext());
+
+
+    // Requesting a page past the end is a client error.
+    thrown.expect(BadRequestFormattedException.class);
+    fileService.getFilePage("/tmp/testPagination", 3L);
+  }
+
+  @Test
+  public void testZeroLengthFile() throws Exception {
+    createFile("/tmp/testZeroLengthFile", "");
+
+    Response response = fileService.getFilePage("/tmp/testZeroLengthFile", 0L);
+
+    ServiceTestUtils.assertHTTPResponseOK(response);
+    JSONObject obj = ((JSONObject) response.getEntity());
+    assertFileJsonResponseSanity(obj);
+
+    FileResource fileResource = (FileResource) obj.get("file");
+    Assert.assertEquals("", fileResource.getFileContent());
+    Assert.assertEquals(0, fileResource.getPage());
+    Assert.assertFalse(fileResource.isHasNext());
+  }
+
+  @Test
+  public void testFileNotFound() throws IOException, InterruptedException {
+    assertHDFSFileNotExists("/tmp/notExistentFile");
+
+    thrown.expect(NotFoundFormattedException.class);
+    fileService.getFilePage("/tmp/notExistentFile", 2L);
+  }
+
+  @Test
+  public void testDeleteFile() throws IOException, InterruptedException {
+    createFile("/tmp/testDeleteFile", "some content");
+
+    assertHDFSFileExists("/tmp/testDeleteFile");
+
+    Response response = fileService.deleteFile("/tmp/testDeleteFile");
+    ServiceTestUtils.assertHTTPResponseNoContent(response);
+
+    assertHDFSFileNotExists("/tmp/testDeleteFile");
+  }
+
+
+
+  // Builds a FileResourceRequest for the given path/content and submits it.
+  private Response createFile(String filePath, String content) throws IOException, InterruptedException {
+    FileService.FileResourceRequest request = new FileService.FileResourceRequest();
+    request.file = new FileResource();
+    request.file.setFilePath(filePath);
+    request.file.setFileContent(content);
+
+    return fileService.createFile(request,
+        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
+  }
+
+
+  private void assertFileJsonResponseSanity(JSONObject obj) {
+    Assert.assertTrue(obj.containsKey("file"));
+  }
+
+  // Reads up to 256 bytes of the HDFS file and compares with the expectation.
+  private void assertHDFSFileContains(String filePath, String expectedContent) throws IOException {
+    byte[] buffer = new byte[256];
+    int read;
+    // Close the stream: the original code leaked one HDFS connection per call.
+    FSDataInputStream fileInputStream = hdfsCluster.getFileSystem().open(new Path(filePath));
+    try {
+      read = fileInputStream.read(buffer);
+    } finally {
+      fileInputStream.close();
+    }
+
+    // read == -1 signals EOF with no data (empty file); the original code
+    // would have thrown on Arrays.copyOfRange(buffer, 0, -1).
+    String actualContent = (read == -1)
+        ? ""
+        : new String(Arrays.copyOfRange(buffer, 0, read), Charset.forName("UTF-8"));
+
+    Assert.assertEquals(expectedContent, actualContent);
+  }
+
+  private void assertHDFSFileExists(String filePath) throws IOException {
+    Assert.assertTrue( hdfsCluster.getFileSystem().exists(new Path(filePath)) );
+  }
+
+  private void assertHDFSFileNotExists(String filePath) throws IOException {
+    Assert.assertFalse(hdfsCluster.getFileSystem().exists(new Path(filePath)) );
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
new file mode 100644
index 0000000..78b6f1f
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.ServiceTestUtils;
+import org.apache.ambari.view.hive.BaseHiveTest;
+import org.apache.ambari.view.hive.utils.HdfsApiMock;
+import org.apache.ambari.view.hive.client.Connection;
+import org.apache.ambari.view.hive.client.ConnectionPool;
+import org.apache.ambari.view.hive.client.HiveClientException;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService;
+import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.hive.service.cli.thrift.*;
+import org.json.simple.JSONObject;
+import org.junit.*;
+import org.junit.rules.ExpectedException;
+
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+
+public class JobServiceTest extends BaseHiveTest {
+  private SavedQueryService savedQueryService;
+  private JobService jobService;
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    BaseHiveTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    BaseHiveTest.shutDown(); // super
+    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    savedQueryService = getService(SavedQueryService.class, handler, context);
+    jobService = getService(JobService.class, handler, context);
+
+    Connection hiveConnection = configureHiveConnectionMock();
+
+    ConnectionPool.setInstance(context, hiveConnection);
+  }
+
+  @Test
+  public void createJobFromQuery() throws IOException, InterruptedException {
+    setupHdfsApiMock();
+
+    SavedQuery savedQueryForJob = createSavedQuery("Test", null);
+    JobService.JobRequest jobCreationRequest = new JobService.JobRequest();
+    jobCreationRequest.job = new JobImpl();
+    jobCreationRequest.job.setQueryId(savedQueryForJob.getId());
+
+
+    Response response = jobService.create(jobCreationRequest,
+        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
+    ServiceTestUtils.assertHTTPResponseCreated(response);
+    JSONObject jobObj = (JSONObject)response.getEntity();
+
+
+    assertResponseJobSanity(jobObj);
+    Assert.assertEquals(getFieldFromJobJSON(jobObj, "queryId"), savedQueryForJob.getId());
+  }
+
+  @Test
+  public void createJobForcedContent() throws IOException, InterruptedException {
+    HdfsApiMock hdfsApiMock = setupHdfsApiMock();
+
+    JobService.JobRequest request = new JobService.JobRequest();
+    request.job = new JobImpl();
+    request.job.setForcedContent("Hello world");
+
+
+    Response response = jobService.create(request,
+        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
+    ServiceTestUtils.assertHTTPResponseCreated(response);
+    JSONObject jobObj = (JSONObject)response.getEntity();
+
+
+    assertResponseJobSanity(jobObj);
+    Assert.assertNull(getFieldFromJobJSON(jobObj, "queryId"));
+    Assert.assertEquals("", getFieldFromJobJSON(jobObj, "forcedContent"));
+    Assert.assertEquals("Hello world", hdfsApiMock.getQueryOutputStream().toString());
+  }
+
+  @Test
+  public void createJobNoSource() throws IOException, InterruptedException {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.mkdir(anyString())).andReturn(true).anyTimes();
+    HdfsApi.setInstance(context, hdfsApi);
+    replay(hdfsApi);
+
+    JobService.JobRequest request = new JobService.JobRequest();
+    request.job = new JobImpl();
+    request.job.setForcedContent(null);
+    request.job.setQueryId(null);
+
+    thrown.expect(BadRequestFormattedException.class);
+    jobService.create(request,
+        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
+  }
+
+
+
+  private Connection configureHiveConnectionMock() throws HiveClientException {
+    TGetOperationStatusResp statusResp = getOperationStatusResp();
+    TOperationHandle operationHandle = getExecutionOperationHandle();
+
+    Connection connection = createNiceMock(Connection.class);
+    expect(connection.executeAsync(anyString())).andReturn(operationHandle).anyTimes();
+    expect(connection.getLogs(anyObject(TOperationHandle.class))).andReturn("some logs").anyTimes();
+    expect(connection.getOperationStatus(anyObject(TOperationHandle.class))).andReturn(statusResp).anyTimes();
+
+    replay(connection);
+    return connection;
+  }
+
+  private TGetOperationStatusResp getOperationStatusResp() {
+    TStatus status = new TStatus();
+    status.setStatusCode(TStatusCode.SUCCESS_STATUS);
+
+    TGetOperationStatusResp statusResp = new TGetOperationStatusResp();
+    statusResp.setStatus(status);
+
+    return statusResp;
+  }
+
+  private TOperationHandle getExecutionOperationHandle() {
+    THandleIdentifier handleIdentifier = new THandleIdentifier();
+    handleIdentifier.setGuid("some guid".getBytes());
+    handleIdentifier.setSecret("some secret".getBytes());
+
+    TOperationHandle operationHandle = new TOperationHandle();
+    operationHandle.setHasResultSet(true);
+    operationHandle.setModifiedRowCount(0);
+    operationHandle.setOperationType(TOperationType.EXECUTE_STATEMENT);
+    operationHandle.setOperationId(handleIdentifier);
+    return operationHandle;
+  }
+
+  @Override
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+    super.setupProperties(properties, baseDir);
+    properties.put("scripts.dir", "/tmp/.hiveQueries");
+    properties.put("jobs.dir", "/tmp/.hiveJobs");
+  }
+
+  public static Response doCreateSavedQuery(String title, String path, SavedQueryService service) {
+    SavedQueryService.SavedQueryRequest request = new SavedQueryService.SavedQueryRequest();
+    request.savedQuery = new SavedQuery();
+    request.savedQuery.setTitle(title);
+    request.savedQuery.setQueryFile(path);
+
+    return service.create(request,
+        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
+  }
+
+  private SavedQuery createSavedQuery(String title, String path) {
+    Response response = doCreateSavedQuery(title, path, savedQueryService);
+    JSONObject obj = (JSONObject)response.getEntity();
+    SavedQuery query = ((SavedQuery) obj.get("savedQuery"));
+    return query;
+  }
+
+
+  private Object getFieldFromJobJSON(JSONObject jobObj, String field) {
+    return ((Map) jobObj.get("job")).get(field);
+  }
+
+  private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException {
+    HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
+    HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
+    HdfsApi.setInstance(context, hdfsApi);
+    replay(hdfsApi);
+    return hdfsApiMock;
+  }
+
+  private void assertResponseJobSanity(JSONObject jobObj) {
+    Assert.assertTrue(jobObj.containsKey("job"));
+    Assert.assertNotNull(((Map) jobObj.get("job")).get("id"));
+    Assert.assertNotNull(((Map) jobObj.get("job")).get("queryFile"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
new file mode 100644
index 0000000..0c060ed
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class LogParserTest {
+    @Test
+    public void testParseMRLog() {
+        String log = "INFO : Number of reduce tasks determined at compile time: 1\n" +
+            "INFO : In order to change the average load for a reducer (in bytes):\n" +
+            "INFO : set hive.exec.reducers.bytes.per.reducer=<number>\n" +
+            "INFO : In order to limit the maximum number of reducers:\n" +
+            "INFO : set hive.exec.reducers.max=<number>\n" +
+            "INFO : In order to set a constant number of reducers:\n" +
+            "INFO : set mapreduce.job.reduces=<number>\n" +
+            "WARN : Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.\n" +
+            "INFO : number of splits:1\n" +
+            "INFO : Submitting tokens for job: job_1421248330903_0003\n" +
+            "INFO : The url to track the job: http://dataworker.hortonworks.com:8088/proxy/application_1421248330903_0003/\n" +
+            "INFO : Starting Job = job_1421248330903_0003, Tracking URL = http://dataworker.hortonworks.com:8088/proxy/application_1421248330903_0003/\n" +
+            "INFO : Kill Command = /usr/hdp/current/hadoop-client/bin/hadoop job -kill job_1421248330903_0003\n" +
+            "INFO : Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 1\n" +
+            "INFO : 2015-01-21 15:03:55,979 Stage-1 map = 0%, reduce = 0%\n" +
+            "INFO : 2015-01-21 15:04:07,503 Stage-1 map = 100%, reduce = 0%, Cumulative CPU 0.79 sec\n" +
+            "INFO : 2015-01-21 15:04:17,384 Stage-1 map = 100%, reduce = 100%, Cumulative CPU 1.86 sec\n" +
+            "INFO : MapReduce Total cumulative CPU time: 1 seconds 860 msec\n" +
+            "INFO : Ended Job = job_1421248330903_0003";
+
+        LogParser p = LogParser.parseLog(log);
+        Assert.assertEquals(1, p.getJobsList().size());
+        Assert.assertEquals("application_1421248330903_0003",(((LogParser.JobId) (p.getJobsList().toArray())[0])
+                                                            .getIdentifier()));
+    }
+
+    @Test
+    public void testParseTezLog() {
+        String log = "INFO : Tez session hasn't been created yet. Opening session\n" +
+            "INFO :\n" +
+            "\n" +
+            "INFO : Status: Running (Executing on YARN cluster with App id application_1423156117563_0003)\n" +
+            "\n" +
+            "INFO : Map 1: -/- Reducer 2: 0/1\n" +
+            "INFO : Map 1: 0/1 Reducer 2: 0/1\n" +
+            "INFO : Map 1: 0/1 Reducer 2: 0/1\n" +
+            "INFO : Map 1: 0(+1)/1 Reducer 2: 0/1\n" +
+            "INFO : Map 1: 0(+1)/1 Reducer 2: 0/1\n" +
+            "INFO : Map 1: 1/1 Reducer 2: 0(+1)/1\n" +
+            "INFO : Map 1: 1/1 Reducer 2: 1/1 ";
+
+        LogParser p = LogParser.parseLog(log);
+        Assert.assertEquals(1, p.getJobsList().size());
+        Assert.assertEquals("application_1423156117563_0003",(((LogParser.JobId) (p.getJobsList().toArray())[0])
+            .getIdentifier()));
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
new file mode 100644
index 0000000..06d5269
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.resources;
+
+import org.apache.ambari.view.hive.BaseHiveTest;
+import org.apache.ambari.view.hive.resources.resources.FileResourceItem;
+import org.apache.ambari.view.hive.resources.resources.FileResourceService;
+import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
+import org.json.simple.JSONObject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.core.UriInfo;
+import java.net.URI;
+
+import static org.easymock.EasyMock.*;
+
+public class FileResourceServiceTest extends BaseHiveTest {
+  @Rule public ExpectedException thrown = ExpectedException.none();
+  private FileResourceService resourceService;
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    resourceService = getService(FileResourceService.class, handler, context);
+  }
+
+  private Response doCreateFileResourceItem() {
+    FileResourceService.ResourceRequest request = new FileResourceService.ResourceRequest();
+    request.fileResource = new FileResourceItem();
+    request.fileResource.setPath("/tmp/file.jar");
+    request.fileResource.setName("TestFileResourceItem");
+
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+    URI uri = UriBuilder.fromUri("http://host/a/b").build();
+    expect(uriInfo.getAbsolutePath()).andReturn(uri);
+
+    HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
+
+    resp_obj.setHeader(eq("Location"), anyString());
+
+    replay(uriInfo, resp_obj);
+    return resourceService.create(request, resp_obj, uriInfo);
+  }
+
+  @Test
+  public void createFileResourceItem() {
+    Response response = doCreateFileResourceItem();
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("fileResource"));
+    Assert.assertNotNull(((FileResourceItem) obj.get("fileResource")).getId());
+    Assert.assertFalse(((FileResourceItem) obj.get("fileResource")).getId() == null);
+  }
+
+  @Test
+  public void resourceNotFound() {
+    thrown.expect(NotFoundFormattedException.class);
+    resourceService.getOne("4242");
+  }
+
+  @Test
+  public void updateFileResourceItem() {
+    Response createdFileResourceItem = doCreateFileResourceItem();
+    Integer createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
+
+    FileResourceService.ResourceRequest request = new FileResourceService.ResourceRequest();
+    request.fileResource = new FileResourceItem();
+    request.fileResource.setPath("/tmp/updatedFileResourceItem.jar");
+    request.fileResource.setName("TestFileResourceItem2");
+
+    Response response = resourceService.update(request, String.valueOf(createdUdfId));
+    Assert.assertEquals(204, response.getStatus());
+
+    Response response2 = resourceService.getOne(String.valueOf(createdUdfId));
+    Assert.assertEquals(200, response2.getStatus());
+
+    JSONObject obj = ((JSONObject) response2.getEntity());
+    Assert.assertTrue(obj.containsKey("fileResource"));
+    Assert.assertEquals(((FileResourceItem) obj.get("fileResource")).getName(), request.fileResource.getName());
+    Assert.assertEquals(((FileResourceItem) obj.get("fileResource")).getPath(), request.fileResource.getPath());
+  }
+
+  @Test
+  public void deleteFileResourceItem() {
+    Response createdFileResourceItem = doCreateFileResourceItem();
+    Integer createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
+
+    Response response = resourceService.delete(String.valueOf(createdUdfId));
+    Assert.assertEquals(204, response.getStatus());
+
+    thrown.expect(NotFoundFormattedException.class);
+    resourceService.getOne(String.valueOf(createdUdfId));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
new file mode 100644
index 0000000..d369bb2
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.savedQueries;
+
+import org.apache.ambari.view.hive.HDFSTest;
+import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
+import org.json.simple.JSONObject;
+import org.junit.*;
+import org.junit.rules.ExpectedException;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.core.UriInfo;
+import java.io.File;
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+
+public class SavedQueryServiceTest extends HDFSTest {
+  //TODO: run without HDFS cluster
+  private SavedQueryService savedQueryService;
+  @Rule public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+      HDFSTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    HDFSTest.shutDown(); // super
+    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    savedQueryService = getService(SavedQueryService.class, handler, context);
+    SavedQueryResourceManager.getViewSingletonObjects().clear();
+  }
+
+  @Override
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+    super.setupProperties(properties, baseDir);
+    properties.put("scripts.dir", "/tmp/.hiveQueries");
+  }
+
+  private Response doCreateSavedQuery() {
+      return doCreateSavedQuery("Luke", "/tmp/luke.hql", savedQueryService);
+  }
+
+  public static Response doCreateSavedQuery(String title, String path, SavedQueryService service) {
+    SavedQueryService.SavedQueryRequest request = new SavedQueryService.SavedQueryRequest();
+    request.savedQuery = new SavedQuery();
+    request.savedQuery.setTitle(title);
+    request.savedQuery.setQueryFile(path);
+
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+    URI uri = UriBuilder.fromUri("http://host/a/b").build();
+    expect(uriInfo.getAbsolutePath()).andReturn(uri);
+
+    HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
+
+    resp_obj.setHeader(eq("Location"), anyString());
+
+    replay(uriInfo, resp_obj);
+    return service.create(request, resp_obj, uriInfo);
+  }
+
+  private Response doCreateSavedQuery(String title, String path) {
+      return doCreateSavedQuery(title, path, savedQueryService);
+  }
+
+  @Test
+  public void createSavedQuery() {
+    Response response = doCreateSavedQuery();
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("savedQuery"));
+    Assert.assertNotNull(((SavedQuery) obj.get("savedQuery")).getId());
+    Assert.assertTrue(((SavedQuery) obj.get("savedQuery")).getId() != null);
+  }
+
+  @Test
+  public void createSavedQueryAutoCreate() {
+    Response response = doCreateSavedQuery("Test", null);
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("savedQuery"));
+    Assert.assertNotNull(((SavedQuery) obj.get("savedQuery")).getId());
+    Assert.assertFalse(((SavedQuery) obj.get("savedQuery")).getId() == null);
+    Assert.assertFalse(((SavedQuery) obj.get("savedQuery")).getQueryFile().isEmpty());
+  }
+
+  @Test
+  public void notFound() {
+    thrown.expect(NotFoundFormattedException.class);
+    savedQueryService.getOne("4242");
+  }
+
+  @Test
+  public void update() {
+    Response created = doCreateSavedQuery();
+    Integer createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
+
+    SavedQueryService.SavedQueryRequest request = new SavedQueryService.SavedQueryRequest();
+    request.savedQuery = new SavedQuery();
+    request.savedQuery.setTitle("Updated Query");
+
+    Response response = savedQueryService.update(request, String.valueOf(createdId));
+    Assert.assertEquals(204, response.getStatus());
+
+    Response response2 = savedQueryService.getOne(String.valueOf(createdId));
+    Assert.assertEquals(200, response2.getStatus());
+
+    JSONObject obj = ((JSONObject) response2.getEntity());
+    Assert.assertTrue(obj.containsKey("savedQuery"));
+    Assert.assertEquals(((SavedQuery) obj.get("savedQuery")).getTitle(), request.savedQuery.getTitle());
+  }
+
+  @Test
+  public void delete() {
+    Response created = doCreateSavedQuery();
+    Integer createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
+
+    Response response = savedQueryService.delete(String.valueOf(createdId));
+    Assert.assertEquals(204, response.getStatus());
+
+    thrown.expect(NotFoundFormattedException.class);
+    savedQueryService.getOne(String.valueOf(createdId));
+  }
+
+  @Test
+  public void list() {
+    doCreateSavedQuery("Title 1", "/path/to/file.hql");
+    doCreateSavedQuery("Title 2", "/path/to/file.hql");
+
+    Response response = savedQueryService.getList();
+    Assert.assertEquals(200, response.getStatus());
+
+    JSONObject obj = (JSONObject) response.getEntity();
+    Assert.assertTrue(obj.containsKey("savedQueries"));
+    List<SavedQuery> items = (List<SavedQuery>) obj.get("savedQueries");
+    boolean containsTitle = false;
+    for(SavedQuery item : items)
+        containsTitle = containsTitle || item.getTitle().compareTo("Title 1") == 0;
+    Assert.assertTrue(containsTitle);
+
+    containsTitle = false;
+    for(SavedQuery item : items)
+        containsTitle = containsTitle || item.getTitle().compareTo("Title 2") == 0;
+    Assert.assertTrue(containsTitle);
+  }
+}


Mime
View raw message