cmda-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From xingwei...@apache.org
Subject [11/51] [partial] incubator-cmda git commit: Update ApacheCMDA_1.0
Date Fri, 16 Oct 2015 23:11:45 GMT
http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/suite.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/suite.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/suite.js
new file mode 100644
index 0000000..869bb88
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/suite.js
@@ -0,0 +1,296 @@
+
+/**
+ * Module dependencies.
+ */
+
+var EventEmitter = require('events').EventEmitter
+  , debug = require('debug')('mocha:suite')
+  , milliseconds = require('./ms')
+  , utils = require('./utils')
+  , Hook = require('./hook');
+
+/**
+ * Expose `Suite`.
+ */
+
+exports = module.exports = Suite;
+
+/**
+ * Create a new `Suite` with the given `title`
+ * and parent `Suite`. When a suite with the
+ * same title is already present, that suite
+ * is returned to provide nicer reporter
+ * and more flexible meta-testing.
+ *
+ * @param {Suite} parent
+ * @param {String} title
+ * @return {Suite}
+ * @api public
+ */
+
+exports.create = function(parent, title){
+  var suite = new Suite(title, parent.ctx);
+  suite.parent = parent;
+  if (parent.pending) suite.pending = true;
+  title = suite.fullTitle();
+  parent.addSuite(suite);
+  return suite;
+};
+
/**
 * Construct a `Suite` titled `title` bound to context `ctx`.
 * An empty/missing title marks the root suite.
 *
 * @param {String} title suite title
 * @param {Context} ctx shared test context
 * @api private
 */

function Suite(title, ctx) {
  // identity & context
  this.title = title;
  this.ctx = ctx;
  this.root = !title;
  this.pending = false;
  // children
  this.suites = [];
  this.tests = [];
  // hook registries, one per lifecycle phase
  this._beforeAll = [];
  this._beforeEach = [];
  this._afterAll = [];
  this._afterEach = [];
  // tunables; see #timeout(), #slow(), #bail()
  this._timeout = 2000;
  this._slow = 75;
  this._bail = false;
}
+
+/**
+ * Inherit from `EventEmitter.prototype` so suites can emit lifecycle
+ * events ('suite', 'test', 'beforeAll', ...) consumed by runners.
+ * NOTE(review): `__proto__` assignment is a legacy idiom; kept as-is.
+ */
+
+Suite.prototype.__proto__ = EventEmitter.prototype;
+
/**
 * Return a shallow clone of this `Suite`: same title, context and
 * runtime settings, but no children, tests or hooks.
 *
 * @return {Suite}
 * @api private
 */

Suite.prototype.clone = function(){
  var copy = new Suite(this.title);
  debug('clone');
  copy.ctx = this.ctx;
  copy.timeout(this.timeout());
  copy.slow(this.slow());
  copy.bail(this.bail());
  return copy;
};
+
/**
 * Get or set the timeout in `ms`, accepting short-hand strings
 * such as "2s" (converted via `milliseconds`).
 *
 * @param {Number|String} ms new timeout when given
 * @return {Suite|Number} the suite (setter) or current timeout (getter)
 * @api private
 */

Suite.prototype.timeout = function(ms){
  // strict equality for consistency with #slow()
  if (0 === arguments.length) return this._timeout;
  if ('string' === typeof ms) ms = milliseconds(ms);
  debug('timeout %d', ms);
  this._timeout = parseInt(ms, 10);
  return this;
};
+
/**
 * Get or set the "slow" threshold in `ms`, accepting short-hand
 * strings such as "2s".
 *
 * @param {Number|String} ms new threshold when given
 * @return {Suite|Number} the suite (setter) or current threshold (getter)
 * @api private
 */

Suite.prototype.slow = function(ms){
  if (arguments.length === 0) return this._slow;
  if (typeof ms == 'string') ms = milliseconds(ms);
  debug('slow %d', ms);
  this._slow = ms;
  return this;
};
+
/**
 * Get or set whether to bail after the first failing test.
 *
 * @param {Boolean} bail new setting when given
 * @return {Suite|Boolean} the suite (setter) or current flag (getter)
 * @api private
 */

Suite.prototype.bail = function(bail){
  if (0 === arguments.length) return this._bail;
  debug('bail %s', bail);
  this._bail = bail;
  return this;
};
+
/**
 * Build a `Hook` titled `title` around `fn`, inheriting this suite's
 * timeout, slow threshold and context. Shared by the four hook
 * registration methods below, which previously duplicated this setup.
 *
 * @param {Suite} suite owning suite
 * @param {String} title hook display title
 * @param {Function} fn hook body
 * @return {Hook}
 */

function createHook(suite, title, fn) {
  var hook = new Hook(title, fn);
  hook.parent = suite;
  hook.timeout(suite.timeout());
  hook.slow(suite.slow());
  hook.ctx = suite.ctx;
  return hook;
}

/**
 * Run `fn(test[, done])` once before any test in this suite.
 * No-op on pending suites.
 *
 * @param {Function} fn
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.beforeAll = function(fn){
  if (this.pending) return this;
  var hook = createHook(this, '"before all" hook', fn);
  this._beforeAll.push(hook);
  this.emit('beforeAll', hook);
  return this;
};

/**
 * Run `fn(test[, done])` once after all tests in this suite.
 * No-op on pending suites.
 *
 * @param {Function} fn
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.afterAll = function(fn){
  if (this.pending) return this;
  var hook = createHook(this, '"after all" hook', fn);
  this._afterAll.push(hook);
  this.emit('afterAll', hook);
  return this;
};

/**
 * Run `fn(test[, done])` before each test case.
 * No-op on pending suites.
 *
 * @param {Function} fn
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.beforeEach = function(fn){
  if (this.pending) return this;
  var hook = createHook(this, '"before each" hook', fn);
  this._beforeEach.push(hook);
  this.emit('beforeEach', hook);
  return this;
};

/**
 * Run `fn(test[, done])` after each test case.
 * No-op on pending suites.
 *
 * @param {Function} fn
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.afterEach = function(fn){
  if (this.pending) return this;
  var hook = createHook(this, '"after each" hook', fn);
  this._afterEach.push(hook);
  this.emit('afterEach', hook);
  return this;
};
+
/**
 * Register `child` as a nested suite: reparent it, propagate the
 * current runtime settings, and emit a 'suite' event.
 *
 * @param {Suite} child
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.addSuite = function(child){
  child.parent = this;
  // child inherits the parent's settings as they stand right now
  child.timeout(this.timeout());
  child.slow(this.slow());
  child.bail(this.bail());
  this.suites.push(child);
  this.emit('suite', child);
  return this;
};
+
/**
 * Register `test` in this suite: reparent it, propagate timeout and
 * slow threshold, share the context, and emit a 'test' event.
 *
 * @param {Test} test
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.addTest = function(test){
  test.parent = this;
  test.timeout(this.timeout());
  test.slow(this.slow());
  test.ctx = this.ctx;
  this.tests.push(test);
  this.emit('test', test);
  return this;
};
+
/**
 * Full title: ancestor titles joined with spaces, root (empty-titled)
 * suites contributing nothing.
 *
 * @return {String}
 * @api public
 */

Suite.prototype.fullTitle = function(){
  var parent = this.parent;
  if (parent) {
    var prefix = parent.fullTitle();
    if (prefix) return prefix + ' ' + this.title;
  }
  return this.title;
};
+
/**
 * Total number of tests in this suite and all nested suites.
 *
 * @return {Number}
 * @api public
 */

Suite.prototype.total = function(){
  var nested = utils.reduce(this.suites, function(sum, child){
    return sum + child.total();
  }, 0);
  return nested + this.tests.length;
};
+
/**
 * Apply `fn(test)` to every test in this suite and, depth-first,
 * in every nested suite.
 *
 * @param {Function} fn
 * @return {Suite} for chaining
 * @api private
 */

Suite.prototype.eachTest = function(fn){
  utils.forEach(this.tests, fn);
  utils.forEach(this.suites, function(child){
    child.eachTest(fn);
  });
  return this;
};

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/template.html
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/template.html b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/template.html
new file mode 100644
index 0000000..0590d4a
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/template.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<!--
+  Standalone browser runner for Mocha: loads mocha.js/mocha.css from the
+  same directory, registers the BDD interface, then runs the suites
+  defined in tests.js inside the #mocha container.
+-->
+<html>
+  <head>
+    <title>Mocha</title>
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <link rel="stylesheet" href="mocha.css" />
+  </head>
+  <body>
+    <div id="mocha"></div>
+    <script src="mocha.js"></script>
+    <script>mocha.setup('bdd')</script>
+    <script src="tests.js"></script>
+    <script>
+      mocha.run();
+    </script>
+  </body>
+</html>

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/test.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/test.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/test.js
new file mode 100644
index 0000000..11773e0
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/test.js
@@ -0,0 +1,32 @@
+
+/**
+ * Module dependencies.
+ */
+
+var Runnable = require('./runnable');
+
+/**
+ * Expose `Test`.
+ */
+
+module.exports = Test;
+
/**
 * A single test case: a `Runnable` whose `type` is "test".
 * Omitting the callback `fn` marks the test as pending.
 *
 * @param {String} title
 * @param {Function} fn test body; may be omitted
 * @api private
 */

function Test(title, fn) {
  Runnable.call(this, title, fn);
  // no body means "pending" (shown but not executed)
  this.pending = !fn;
  this.type = 'test';
}

/**
 * Inherit from `Runnable.prototype`.
 */

Test.prototype.__proto__ = Runnable.prototype;
http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/utils.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/utils.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/utils.js
new file mode 100644
index 0000000..37fd5d7
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/lib/utils.js
@@ -0,0 +1,299 @@
+/**
+ * Module dependencies.
+ */
+
+var fs = require('fs')
+  , path = require('path')
+  , join = path.join
+  , debug = require('debug')('mocha:watch');
+
+/**
+ * Ignored directories.
+ */
+
+var ignore = ['node_modules', '.git'];
+
+/**
+ * Escape special characters in the given string of html.
+ *
+ * @param  {String} html
+ * @return {String}
+ * @api private
+ */
+
+exports.escape = function(html){
+  return String(html)
+    .replace(/&/g, '&amp;')
+    .replace(/"/g, '&quot;')
+    .replace(/</g, '&lt;')
+    .replace(/>/g, '&gt;');
+};
+
+/**
+ * Array#forEach (<=IE8)
+ *
+ * @param {Array} array
+ * @param {Function} fn
+ * @param {Object} scope
+ * @api private
+ */
+
+exports.forEach = function(arr, fn, scope){
+  for (var i = 0, l = arr.length; i < l; i++)
+    fn.call(scope, arr[i], i);
+};
+
+/**
+ * Array#map (<=IE8)
+ *
+ * @param {Array} array
+ * @param {Function} fn
+ * @param {Object} scope
+ * @api private
+ */
+
+exports.map = function(arr, fn, scope){
+  var result = [];
+  for (var i = 0, l = arr.length; i < l; i++)
+    result.push(fn.call(scope, arr[i], i));
+  return result;
+};
+
+/**
+ * Array#indexOf (<=IE8)
+ *
+ * @parma {Array} arr
+ * @param {Object} obj to find index of
+ * @param {Number} start
+ * @api private
+ */
+
+exports.indexOf = function(arr, obj, start){
+  for (var i = start || 0, l = arr.length; i < l; i++) {
+    if (arr[i] === obj)
+      return i;
+  }
+  return -1;
+};
+
+/**
+ * Array#reduce (<=IE8)
+ *
+ * @param {Array} array
+ * @param {Function} fn
+ * @param {Object} initial value
+ * @api private
+ */
+
+exports.reduce = function(arr, fn, val){
+  var rval = val;
+
+  for (var i = 0, l = arr.length; i < l; i++) {
+    rval = fn(rval, arr[i], i, arr);
+  }
+
+  return rval;
+};
+
+/**
+ * Array#filter (<=IE8)
+ *
+ * @param {Array} array
+ * @param {Function} fn
+ * @api private
+ */
+
+exports.filter = function(arr, fn){
+  var ret = [];
+
+  for (var i = 0, l = arr.length; i < l; i++) {
+    var val = arr[i];
+    if (fn(val, i, arr)) ret.push(val);
+  }
+
+  return ret;
+};
+
+/**
+ * Object.keys (<=IE8)
+ *
+ * @param {Object} obj
+ * @return {Array} keys
+ * @api private
+ */
+
+exports.keys = Object.keys || function(obj) {
+  var keys = []
+    , has = Object.prototype.hasOwnProperty // for `window` on <=IE8
+
+  for (var key in obj) {
+    if (has.call(obj, key)) {
+      keys.push(key);
+    }
+  }
+
+  return keys;
+};
+
+/**
+ * Watch the given `files` for changes
+ * and invoke `fn(file)` on modification.
+ *
+ * @param {Array} files
+ * @param {Function} fn
+ * @api private
+ */
+
+exports.watch = function(files, fn){
+  var options = { interval: 100 };
+  files.forEach(function(file){
+    debug('file %s', file);
+    fs.watchFile(file, options, function(curr, prev){
+      if (prev.mtime < curr.mtime) fn(file);
+    });
+  });
+};
+
/**
 * Predicate: keep `path` only when it is NOT in the `ignore` list.
 * Used with Array#filter, so a truthy return means "include/traverse".
 * (The previous header, "Ignored files.", described the opposite.)
 *
 * @param {String} path directory entry name
 * @return {Boolean} true when the entry should be kept
 */

function ignored(path){
  return !~ignore.indexOf(path);
}
+
+/**
+ * Lookup files in the given `dir`.
+ *
+ * @return {Array}
+ * @api private
+ */
+
+exports.files = function(dir, ret){
+  ret = ret || [];
+
+  fs.readdirSync(dir)
+  .filter(ignored)
+  .forEach(function(path){
+    path = join(dir, path);
+    if (fs.statSync(path).isDirectory()) {
+      exports.files(path, ret);
+    } else if (path.match(/\.(js|coffee|litcoffee|coffee.md)$/)) {
+      ret.push(path);
+    }
+  });
+
+  return ret;
+};
+
+/**
+ * Compute a slug from the given `str`.
+ *
+ * @param {String} str
+ * @return {String}
+ * @api private
+ */
+
+exports.slug = function(str){
+  return str
+    .toLowerCase()
+    .replace(/ +/g, '-')
+    .replace(/[^-\w]/g, '');
+};
+
+/**
+ * Strip the function definition from `str`,
+ * and re-indent for pre whitespace.
+ */
+
+exports.clean = function(str) {
+  str = str
+    .replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/^\uFEFF/, '')
+    .replace(/^function *\(.*\) *{/, '')
+    .replace(/\s+\}$/, '');
+
+  var spaces = str.match(/^\n?( *)/)[1].length
+    , tabs = str.match(/^\n?(\t*)/)[1].length
+    , re = new RegExp('^\n?' + (tabs ? '\t' : ' ') + '{' + (tabs ? tabs : spaces) + '}', 'gm');
+
+  str = str.replace(re, '');
+
+  return exports.trim(str);
+};
+
+/**
+ * Escape regular expression characters in `str`.
+ *
+ * @param {String} str
+ * @return {String}
+ * @api private
+ */
+
+exports.escapeRegexp = function(str){
+  return str.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&");
+};
+
+/**
+ * Trim the given `str`.
+ *
+ * @param {String} str
+ * @return {String}
+ * @api private
+ */
+
+exports.trim = function(str){
+  return str.replace(/^\s+|\s+$/g, '');
+};
+
+/**
+ * Parse the given `qs`.
+ *
+ * @param {String} qs
+ * @return {Object}
+ * @api private
+ */
+
+exports.parseQuery = function(qs){
+  return exports.reduce(qs.replace('?', '').split('&'), function(obj, pair){
+    var i = pair.indexOf('=')
+      , key = pair.slice(0, i)
+      , val = pair.slice(++i);
+
+    obj[key] = decodeURIComponent(val);
+    return obj;
+  }, {});
+};
+
/**
 * Wrap syntactic elements of the JavaScript source `js` in
 * class-tagged `<span>`s for display.
 *
 * @param {String} js
 * @return {String} highlighted HTML
 * @api private
 */

function highlight(js) {
  // NOTE: order matters — angle brackets first, then comments,
  // strings, numbers, `new X`, and finally bare keywords
  var html = js
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/\/\/(.*)/gm, '<span class="comment">//$1</span>')
    .replace(/('.*?')/gm, '<span class="string">$1</span>')
    .replace(/(\d+\.\d+)/gm, '<span class="number">$1</span>')
    .replace(/(\d+)/gm, '<span class="number">$1</span>')
    .replace(/\bnew *(\w+)/gm, '<span class="keyword">new</span> <span class="init">$1</span>')
    .replace(/\b(function|new|throw|return|var|if|else)\b/gm, '<span class="keyword">$1</span>');
  return html;
}
+
+/**
+ * Highlight the contents of tag `name`.
+ *
+ * @param {String} name
+ * @api private
+ */
+
+exports.highlightTags = function(name) {
+  var code = document.getElementsByTagName(name);
+  for (var i = 0, len = code.length; i < len; ++i) {
+    code[i].innerHTML = highlight(code[i].innerHTML);
+  }
+};

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/package.json
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/package.json b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/package.json
new file mode 100644
index 0000000..2f5f07a
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/mocha/package.json
@@ -0,0 +1,49 @@
+{
+  "name": "mocha",
+  "version": "1.17.1",
+  "description": "simple, flexible, fun test framework",
+  "keywords": [
+    "mocha",
+    "test",
+    "bdd",
+    "tdd",
+    "tap"
+  ],
+  "author": "TJ Holowaychuk <tj@vision-media.ca>",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/visionmedia/mocha.git"
+  },
+  "main": "./index",
+  "bin": {
+    "mocha": "./bin/mocha",
+    "_mocha": "./bin/_mocha"
+  },
+  "engines": {
+    "node": ">= 0.4.x"
+  },
+  "scripts": {
+    "test": "make test-all"
+  },
+  "dependencies": {
+    "commander": "2.0.0",
+    "growl": "1.7.x",
+    "jade": "0.26.3",
+    "diff": "1.0.7",
+    "debug": "*",
+    "mkdirp": "0.3.5",
+    "glob": "3.2.3"
+  },
+  "devDependencies": {
+    "should": ">= 2.0.x",
+    "coffee-script": "1.2"
+  },
+  "files": [
+    "bin",
+    "images",
+    "lib",
+    "index.js",
+    "mocha.css",
+    "mocha.js"
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp
new file mode 100644
index 0000000..345f07a
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp
@@ -0,0 +1,2 @@
#!/usr/bin/env sh
# Thin wrapper: forward all arguments to the node-gyp CLI bundled
# two levels up in node_modules.
node "$(dirname "$0")/../../node_modules/node-gyp/bin/node-gyp.js" "$@"

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp.cmd
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp.cmd b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp.cmd
new file mode 100644
index 0000000..c2563ea
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/node-gyp-bin/node-gyp.cmd
@@ -0,0 +1 @@
+:: Windows shim: forward all arguments to the bundled node-gyp CLI.
+node "%~dp0\..\..\node_modules\node-gyp\bin\node-gyp.js" %*

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm
new file mode 100644
index 0000000..07ade35
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm
@@ -0,0 +1,13 @@
#!/bin/sh
# npm launcher: prefer a node.exe sitting next to this script
# (Windows-style layouts), otherwise use `node` from PATH.

basedir=$(dirname "$0")

# Cygwin: node.exe needs a Windows-style path
case $(uname) in
    *CYGWIN*) basedir=$(cygpath -w "$basedir");;
esac

if [ -x "$basedir/node.exe" ]; then
  "$basedir/node.exe" "$basedir/node_modules/npm/bin/npm-cli.js" "$@"
else
  node "$basedir/node_modules/npm/bin/npm-cli.js" "$@"
fi

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm-cli.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm-cli.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm-cli.js
new file mode 100644
index 0000000..ef88735
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm-cli.js
@@ -0,0 +1,86 @@
+#!/usr/bin/env node
+// npm CLI entry point: parse flags with nopt, verify the running node
+// version satisfies npm's requirement, then dispatch the command.
+;(function () { // wrapper in case we're in module_context mode
+
+// windows: running "npm blah" in this folder will invoke WSH, not node.
+if (typeof WScript !== "undefined") {
+  WScript.echo("npm does not work when run\n"
+              +"with the Windows Scripting Host\n\n"
+              +"'cd' to a different directory,\n"
+              +"or type 'npm.cmd <args>',\n"
+              +"or type 'node npm <args>'.")
+  WScript.quit(1)
+  return
+}
+
+
+process.title = "npm"
+
+var log = require("npmlog")
+log.pause() // will be unpaused when config is loaded.
+log.info("it worked if it ends with", "ok")
+
+var fs = require("graceful-fs")
+  , path = require("path")
+  , npm = require("../lib/npm.js")
+  , npmconf = require("npmconf")
+  , errorHandler = require("../lib/utils/error-handler.js")
+
+  , configDefs = npmconf.defs
+  , shorthands = configDefs.shorthands
+  , types = configDefs.types
+  , nopt = require("nopt")
+
+// if npm is called as "npmg" or "npm_g", then
+// run in global mode.
+if (path.basename(process.argv[1]).slice(-1)  === "g") {
+  process.argv.splice(1, 1, "npm", "-g")
+}
+
+log.verbose("cli", process.argv)
+
+// parse flags into `conf`; whatever remains is the command plus its args
+var conf = nopt(types, shorthands)
+npm.argv = conf.argv.remain
+if (npm.deref(npm.argv[0])) npm.command = npm.argv.shift()
+else conf.usage = true
+
+
+if (conf.version) {
+  console.log(npm.version)
+  return
+}
+
+// `npm --versions` is sugar for the `version` command with no arguments
+if (conf.versions) {
+  npm.command = "version"
+  conf.usage = false
+  npm.argv = []
+}
+
+log.info("using", "npm@%s", npm.version)
+log.info("using", "node@%s", process.version)
+
+// make sure that this version of node works with this version of npm.
+var semver = require("semver")
+  , nodeVer = process.version
+  , reqVer = npm.nodeVersionRequired
+if (reqVer && !semver.satisfies(nodeVer, reqVer)) {
+  return errorHandler(new Error(
+    "npm doesn't work with node " + nodeVer
+    + "\nRequired: node@" + reqVer), true)
+}
+
+process.on("uncaughtException", errorHandler)
+
+// unrecognised command: rewrite the invocation to `npm help <command>`
+if (conf.usage && npm.command !== "help") {
+  npm.argv.unshift(npm.command)
+  npm.command = "help"
+}
+
+// now actually fire up npm and run the command.
+// this is how to use npm programmatically:
+conf._exit = true
+npm.load(conf, function (er) {
+  if (er) return errorHandler(er)
+  npm.commands[npm.command](npm.argv, errorHandler)
+})
+
+})()

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm.cmd
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm.cmd b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm.cmd
new file mode 100644
index 0000000..7720e20
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/npm.cmd
@@ -0,0 +1,6 @@
+:: Created by npm, please don't edit manually.
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) ELSE (
+  node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+)

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/read-package-json.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/read-package-json.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/read-package-json.js
new file mode 100644
index 0000000..3e5a0c7
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/bin/read-package-json.js
@@ -0,0 +1,22 @@
// Print a parsed package.json, or selected dotted fields of it.
// Usage: read-package-json <file> [<fields> ...]

var argv = process.argv
if (argv.length < 3) {
  console.error("Usage: read-package.json <file> [<fields> ...]")
  process.exit(1)
}

var fs = require("fs")
  , file = argv[2]
  , readJson = require("read-package-json")

readJson(file, function (er, data) {
  if (er) throw er
  // no fields requested: dump the whole parsed object
  if (argv.length === 3) return console.log(data)
  argv.slice(3).forEach(function (fieldPath) {
    // walk dotted paths like "scripts.test"
    var val = data
    fieldPath.split(".").forEach(function (segment) {
      val = val[segment]
    })
    console.log(val)
  })
})

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/adduser.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/adduser.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/adduser.js
new file mode 100644
index 0000000..739f142
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/adduser.js
@@ -0,0 +1,138 @@
+
+module.exports = adduser
+
+var log = require("npmlog")
+  , npm = require("./npm.js")
+  , registry = npm.registry
+  , read = require("read")
+  , userValidate = require("npm-user-validate")
+  , crypto
+
+try {
+  crypto = process.binding("crypto") && require("crypto")
+} catch (ex) {}
+
+adduser.usage = "npm adduser\nThen enter stuff at the prompts"
+
+// Interactively prompt for username, password and email, then register
+// the user against the configured registry and persist the credentials
+// via `save`. `c` holds the currently stored credentials, `u` collects
+// the newly entered ones; the steps in `fns` run sequentially via `loop`.
+function adduser (args, cb) {
+  if (!crypto) return cb(new Error(
+    "You must compile node with ssl support to use the adduser feature"))
+
+  var c = { u : npm.config.get("username") || ""
+          , p : npm.config.get("_password") || ""
+          , e : npm.config.get("email") || ""
+          }
+    , changed = false // NOTE(review): unused — readUsername sets c.changed instead
+    , u = {}
+    , fns = [readUsername, readPassword, readEmail, save]
+
+  loop()
+  // advance to the next prompt step, aborting on the first error
+  function loop (er) {
+    if (er) return cb(er)
+    var fn = fns.shift()
+    if (fn) return fn(c, u, loop)
+    cb()
+  }
+}
+
// Prompt for a username (defaulting to the stored one), validate it,
// record whether it changed, and stash it in `u.u`.
function readUsername (c, u, cb) {
  var validate = userValidate.username
  read({prompt: "Username: ", default: c.u || ""}, function (er, un) {
    if (er) {
      return cb(er.message === "cancelled" ? er.message : er)
    }

    // make sure it's valid.  we have to do this here, because
    // couchdb will only ever say "bad password" with a 401 when
    // you try to PUT a _users record that the validate_doc_update
    // rejects for *any* reason.

    if (!un) {
      return readUsername(c, u, cb)
    }

    var invalid = validate(un)
    if (invalid) {
      log.warn(invalid.message)
      return readUsername(c, u, cb)
    }

    c.changed = c.u !== un
    u.u = un
    cb(er)
  })
}
+
// Prompt for a password (silently) and validate it. When the username
// did not change, the stored password is reused without prompting.
function readPassword (c, u, cb) {
  var validate = userValidate.pw

  if (!c.changed) {
    u.p = c.p
    return cb()
  }
  read({prompt: "Password: ", silent: true}, function (er, pw) {
    if (er) {
      return cb(er.message === "cancelled" ? er.message : er)
    }

    if (!pw) {
      return readPassword(c, u, cb)
    }

    var invalid = validate(pw)
    if (invalid) {
      log.warn(invalid.message)
      return readPassword(c, u, cb)
    }

    u.p = pw
    cb(er)
  })
}
+
// Prompt for an email address (defaulting to the stored one),
// validate it, and stash it in `u.e`.
function readEmail (c, u, cb) {
  var validate = userValidate.email
  var opts = { prompt: "Email: (this IS public) ", default: c.e || "" }
  read(opts, function (er, em) {
    if (er) {
      return cb(er.message === "cancelled" ? er.message : er)
    }

    if (!em) {
      return readEmail(c, u, cb)
    }

    var invalid = validate(em)
    if (invalid) {
      log.warn(invalid.message)
      return readEmail(c, u, cb)
    }

    u.e = em
    cb(er)
  })
}
+
+// Push the collected credentials to the registry and, on success,
+// persist them in the user config (dropping any stale auth token).
+function save (c, u, cb) {
+  if (c.changed) {
+    // username changed: wipe cached registry credentials first
+    delete registry.auth
+    delete registry.username
+    delete registry.password
+    registry.username = u.u
+    registry.password = u.p
+  }
+
+  // save existing configs, but yank off for this PUT
+  registry.adduser(u.u, u.p, u.e, function (er) {
+    if (er) return cb(er)
+    registry.username = u.u
+    registry.password = u.p
+    registry.email = u.e
+    npm.config.set("username", u.u, "user")
+    npm.config.set("_password", u.p, "user")
+    npm.config.set("email", u.e, "user")
+    npm.config.del("_token", "user")
+    log.info("adduser", "Authorized user %s", u.u)
+    npm.config.save("user", cb)
+  })
+}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bin.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bin.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bin.js
new file mode 100644
index 0000000..719e887
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bin.js
@@ -0,0 +1,18 @@
+module.exports = bin
+
+var npm = require("./npm.js")
+
+bin.usage = "npm bin\nnpm bin -g\n(just prints the bin folder)"
+
+// Print npm's bin directory; with -g, warn when it is not on PATH.
+function bin (args, silent, cb) {
+  // comma expression: shift the callback over when called as bin(args, cb)
+  if (typeof cb !== "function") cb = silent, silent = false
+  var b = npm.bin
+    , PATH = (process.env.PATH || "").split(":")
+
+  if (!silent) console.log(b)
+  process.nextTick(cb.bind(this, null, b))
+
+  // NOTE(review): splits PATH on ":" — Windows uses ";" (path.delimiter),
+  // so this containment check can misfire there.
+  if (npm.config.get("global") && PATH.indexOf(b) === -1) {
+    npm.config.get("logstream").write("(not in PATH env variable)\n")
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bugs.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bugs.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bugs.js
new file mode 100644
index 0000000..be79ab3
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/bugs.js
@@ -0,0 +1,61 @@
+
+module.exports = bugs
+
+bugs.usage = "npm bugs <pkgname>"
+
+var npm = require("./npm.js")
+  , registry = npm.registry
+  , log = require("npmlog")
+  , opener = require("opener")
+  , path = require("path")
+  , readJson = require("read-package-json")
+  , fs = require("fs")
+
// Shell completion: offer package names from the registry's short
// list, but only for the first argument.
bugs.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length > 2) return cb()
  registry.get("/-/short", 60000, function (er, list) {
    // registry errors degrade to an empty completion list
    return cb(null, list || [])
  })
}
+
// Open the bug tracker for the named package (version suffix ignored),
// or for the package in the current/named directory.
function bugs (args, cb) {
  var name = args.length && args[0].split("@").shift() || '.'
  fs.stat(name, function (er, s) {
    // not a local path: look the package up in the registry
    if (er && er.code === "ENOENT") return callRegistry(name, cb)
    if (er) return cb(er)
    if (!s.isDirectory()) return callRegistry(name, cb)
    readJson(path.resolve(name, "package.json"), function (er, d) {
      if (er) return cb(er)
      getUrlAndOpen(d, cb)
    })
  })
}
+
/**
 * Resolve the bug-tracker URL for package data `d` — from its `bugs`
 * field, or derived from a github `repository` URL — and open it in
 * the configured browser. Falls back to the package's npmjs.org page.
 */
function getUrlAndOpen (d, cb) {
  var bugs = d.bugs
    , repo = d.repository || d.repositories
    , url
  if (bugs) {
    // FIX: original tested `typeof url` (always undefined here), so a
    // string-valued `bugs` field was never used directly
    url = (typeof bugs === "string") ? bugs : bugs.url
  } else if (repo) {
    if (Array.isArray(repo)) repo = repo.shift()
    if (repo.hasOwnProperty("url")) repo = repo.url
    log.verbose("repository", repo)
    // FIX: original tested `bugs` here, which is always falsy in this
    // branch, leaving the github-issues derivation dead code
    if (repo && repo.match(/^(https?:\/\/|git(:\/\/|@))github.com/)) {
      url = repo.replace(/^git(@|:\/\/)/, "https://")
                .replace(/^https?:\/\/github.com:/, "https://github.com/")
                .replace(/\.git$/, '')+"/issues"
    }
  }
  if (!url) {
    url = "https://npmjs.org/package/" + d.name
  }
  opener(url, { command: npm.config.get("browser") }, cb)
}
+
// Fetch the latest published metadata for package `n` from the
// registry and open its bug-tracker URL.
function callRegistry (n, cb) {
  registry.get(n + "/latest", 3600, function (er, d) {
    if (er) return cb(er)
    getUrlAndOpen(d, cb)
  })
}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/build.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/build.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/build.js
new file mode 100644
index 0000000..4b73c1e
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/build.js
@@ -0,0 +1,228 @@
+// npm build command
+
+// everything about the installation after the creation of
+// the .npm/{name}/{version}/package folder.
+// linking the modules into the npm.root,
+// resolving dependencies, etc.
+
+// This runs AFTER install or link are completed.
+
+var npm = require("./npm.js")
+  , log = require("npmlog")
+  , chain = require("slide").chain
+  , fs = require("graceful-fs")
+  , path = require("path")
+  , lifecycle = require("./utils/lifecycle.js")
+  , readJson = require("read-package-json")
+  , link = require("./utils/link.js")
+  , linkIfExists = link.ifExists
+  , cmdShim = require("cmd-shim")
+  , cmdShimIfExists = cmdShim.ifExists
+  , asyncMap = require("slide").asyncMap
+
module.exports = build
build.usage = "npm build <folder>\n(this is plumbing)"

// folders already built by this process (so bundles aren't rebuilt twice)
build._didBuild = {}
// sentinel value: passed as `didPre` to suppress lifecycle scripts
build._noLC = {}
// npm build <folder>...
// Middle arguments are optional and shift right-to-left, so
// (args, cb), (args, global, cb), (args, global, didPre, cb) all map
// onto the full (args, global, didPre, didRB, cb) shape.
function build (args, global, didPre, didRB, cb) {
  if (typeof cb !== "function") cb = didRB, didRB = false
  if (typeof cb !== "function") cb = didPre, didPre = false
  if (typeof cb !== "function") {
    cb = global, global = npm.config.get("global")
  }
  // it'd be nice to asyncMap these, but actually, doing them
  // in parallel generally munges up the output from node-waf
  var builder = build_(global, didPre, didRB)
  chain(args.map(function (arg) { return function (cb) {
    builder(arg, cb)
  }}), cb)
}
+
// Returns a builder for one folder: reads its package.json, links
// bins/mans, and runs the install lifecycle scripts in order.
// Falsy entries in the array are skipped by `chain`.
function build_ (global, didPre, didRB) { return function (folder, cb) {
  folder = path.resolve(folder)
  build._didBuild[folder] = true
  log.info("build", folder)
  readJson(path.resolve(folder, "package.json"), function (er, pkg) {
    if (er) return cb(er)
    chain
      ( [ !didPre && [lifecycle, pkg, "preinstall", folder]
        , [linkStuff, pkg, folder, global, didRB]
        // npm installing itself also persists its builtin config
        , pkg.name === "npm" && [writeBuiltinConf, folder]
        // build._noLC as didPre means: skip all lifecycle scripts
        , didPre !== build._noLC && [lifecycle, pkg, "install", folder]
        , didPre !== build._noLC && [lifecycle, pkg, "postinstall", folder]
        , didPre !== build._noLC
          && npm.config.get("npat")
          && [lifecycle, pkg, "test", folder] ]
      , cb )
  })
}}
+
// Persist the builtin npm configuration whenever npm installs itself.
// the builtin config is "sticky": any time npm installs itself, it
// puts its builtin config file there as well.
function writeBuiltinConf (folder, cb) {
  var isSelfInstall = folder === path.dirname(__dirname)
  if (npm.config.usingBuiltin && isSelfInstall) {
    npm.config.save("builtin", cb)
  } else {
    cb()
  }
}
+
// Link bins and man pages for pkg, and rebuild bundled dependencies
// unless that was already done (didRB).
function linkStuff (pkg, folder, global, didRB, cb) {
  // allow to opt out of linking binaries.
  if (npm.config.get("bin-links") === false) return cb()

  // if it's global, and folder is in {prefix}/node_modules,
  // then bins are in {prefix}/bin
  // otherwise, then bins are in folder/../.bin
  var parent = path.dirname(folder)
    , gnm = global && npm.globalDir
    , gtop = parent === gnm
  // (removed unused local `top` -- it was computed but never read)

  log.verbose("linkStuff", [global, gnm, gtop, parent])
  log.info("linkStuff", pkg._id)

  shouldWarn(pkg, folder, global, function() {
    // a falsy entry (when didRB is set) is skipped inside the callback
    asyncMap( [linkBins, linkMans, !didRB && rebuildBundles]
            , function (fn, cb) {
      if (!fn) return cb()
      log.verbose(fn.name, pkg._id)
      fn(pkg, folder, parent, gtop, cb)
    }, cb)
  })
}
+
// Warn when a preferGlobal package is being installed locally without
// being declared in the top-level package's dependencies.
function shouldWarn(pkg, folder, global, cb) {
  var parent = path.dirname(folder)
    , top = parent === npm.dir
    , cwd = process.cwd()

  readJson(path.resolve(cwd, "package.json"), function(er, topPkg) {
    if (er) return cb(er)

    var linkedPkg = path.basename(cwd)
    var currentPkg = path.basename(folder)

    // on the first call the searched package IS the linked package;
    // no warning applies there
    if (linkedPkg === currentPkg) return cb()

    if (!topPkg.dependencies) return cb()

    // don't generate a warning if it's listed in dependencies
    var declared =
      Object.keys(topPkg.dependencies).indexOf(currentPkg) !== -1
    if (!declared && top && pkg.preferGlobal && !global) {
      log.warn("prefer global", pkg._id + " should be installed with -g")
    }

    cb()
  })
}
+
// Re-run the build for bundled dependencies found in folder/node_modules.
// Serialized via `chain` so build-script output stays readable.
function rebuildBundles (pkg, folder, parent, gtop, cb) {
  if (!npm.config.get("rebuild-bundle")) return cb()

  var deps = Object.keys(pkg.dependencies || {})
             .concat(Object.keys(pkg.devDependencies || {}))
    , bundles = pkg.bundleDependencies || pkg.bundledDependencies || []

  fs.readdir(path.resolve(folder, "node_modules"), function (er, files) {
    // error means no bundles
    if (er) return cb()

    log.verbose("rebuildBundles", files)
    // don't asyncMap these, because otherwise build script output
    // gets interleaved and is impossible to read
    chain(files.filter(function (file) {
      // rebuild if:
      // not a .folder, like .bin or .hooks
      return !file.match(/^[\._-]/)
          // not some old 0.x style bundle
          && file.indexOf("@") === -1
          // either not a dep, or explicitly bundled
          && (deps.indexOf(file) === -1 || bundles.indexOf(file) !== -1)
    }).map(function (file) {
      file = path.resolve(folder, "node_modules", file)
      return function (cb) {
        if (build._didBuild[file]) return cb()
        log.verbose("rebuild bundle", file)
        // if file is not a package dir, then don't do it.
        fs.lstat(path.resolve(file, "package.json"), function (er, st) {
          if (er) return cb()
          build_(false)(file, cb)
        })
    }}), cb)
  })
}
+
// Link (or cmd-shim, on Windows) every entry of pkg.bin into the bin
// root, then make the source files executable.  Skipped unless the
// package sits in a node_modules dir or is a global top-level install.
function linkBins (pkg, folder, parent, gtop, cb) {
  if (!pkg.bin || !gtop && path.basename(parent) !== "node_modules") {
    return cb()
  }
  var binRoot = gtop ? npm.globalBin
                     : path.resolve(parent, ".bin")
  log.verbose("link bins", [pkg.bin, binRoot, gtop])

  asyncMap(Object.keys(pkg.bin), function (b, cb) {
    linkBin( path.resolve(folder, pkg.bin[b])
           , path.resolve(binRoot, b)
           , gtop && folder
           , function (er) {
      if (er) return cb(er)
      // bins should always be executable.
      // XXX skip chmod on windows?
      var src = path.resolve(folder, pkg.bin[b])
      fs.chmod(src, npm.modes.exec, function (er) {
        // a missing bin file is tolerated when scripts are disabled
        if (er && er.code === "ENOENT" && npm.config.get("ignore-scripts")) {
          return cb()
        }
        if (er || !gtop) return cb(er)
        // for global installs, echo what got linked where
        var dest = path.resolve(binRoot, b)
          , out = npm.config.get("parseable")
                ? dest + "::" + src + ":BINFILE"
                : dest + " -> " + src
        console.log(out)
        cb()
      })
    })
  }, cb)
}
+
// Platform dispatch for a single bin entry: cmd shim on Windows,
// symlink everywhere else.
function linkBin (from, to, gently, cb) {
  if (process.platform === "win32") {
    return cmdShimIfExists(from, to, cb)
  }
  return linkIfExists(from, to, gently, cb)
}
+
// Link man pages into {prefix}/share/man/man<sxn>/ for global installs
// on non-Windows platforms.
function linkMans (pkg, folder, parent, gtop, cb) {
  if (!pkg.man || !gtop || process.platform === "win32") return cb()

  var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")

  // make sure that the mans are unique.
  // otherwise, if there are dupes, it'll fail with EEXIST
  var set = pkg.man.reduce(function (acc, man) {
    acc[path.basename(man)] = man
    return acc
  }, {})
  pkg.man = pkg.man.filter(function (man) {
    return set[path.basename(man)] === man
  })

  asyncMap(pkg.man, function (man, cb) {
    if (typeof man !== "string") return cb()
    var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/)
    // BUG FIX: an entry without a numeric section suffix made
    // parseMan null and crashed on parseMan[1]; warn and skip instead.
    if (!parseMan) {
      log.warn("linkMans", "invalid man entry: " + man)
      return cb()
    }
    var stem = parseMan[1]
      , sxn = parseMan[2]
      , gz = parseMan[3] || ""
      , bn = path.basename(stem)
      , manDest = path.join(manRoot, "man" + sxn, bn)

    linkIfExists(man, manDest, gtop && folder, cb)
  }, cb)
}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/cache.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/cache.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/cache.js
new file mode 100644
index 0000000..c182817
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/cache.js
@@ -0,0 +1,1260 @@
+// XXX lib/utils/tar.js and this file need to be rewritten.
+
+// URL-to-cache folder mapping:
+// : -> !
+// @ -> _
+// http://registry.npmjs.org/foo/version -> cache/http!/...
+//
+
+/*
+fetching a url:
+1. Check for url in inFlightUrls.  If present, add cb, and return.
+2. create inFlightURL list
+3. Acquire lock at {cache}/{sha(url)}.lock
+   retries = {cache-lock-retries, def=3}
+   stale = {cache-lock-stale, def=30000}
+   wait = {cache-lock-wait, def=100}
+4. if lock can't be acquired, then fail
+5. fetch url, clear lock, call cbs
+
+cache folders:
+1. urls: http!/server.com/path/to/thing
+2. c:\path\to\thing: file!/c!/path/to/thing
+3. /path/to/thing: file!/path/to/thing
+4. git@ private: git_github.com!isaacs/npm
+5. git://public: git!/github.com/isaacs/npm
+6. git+blah:// git-blah!/server.com/foo/bar
+
+adding a folder:
+1. tar into tmp/random/package.tgz
+2. untar into tmp/random/contents/package, stripping one dir piece
+3. tar tmp/random/contents/package to cache/n/v/package.tgz
+4. untar cache/n/v/package.tgz into cache/n/v/package
+5. rm tmp/random
+
+Adding a url:
+1. fetch to tmp/random/package.tgz
+2. goto folder(2)
+
+adding a name@version:
+1. registry.get(name/version)
+2. if response isn't 304, add url(dist.tarball)
+
+adding a name@range:
+1. registry.get(name)
+2. Find a version that satisfies
+3. add name@version
+
+adding a local tarball:
+1. untar to tmp/random/{blah}
+2. goto folder(2)
+*/
+
// Public API surface of the cache module.
exports = module.exports = cache
cache.read = read
cache.clean = clean
cache.unpack = unpack
cache.lock = lock
cache.unlock = unlock
+
+var mkdir = require("mkdirp")
+  , spawn = require("child_process").spawn
+  , exec = require("child_process").execFile
+  , once = require("once")
+  , fetch = require("./utils/fetch.js")
+  , npm = require("./npm.js")
+  , fs = require("graceful-fs")
+  , rm = require("rimraf")
+  , readJson = require("read-package-json")
+  , registry = npm.registry
+  , log = require("npmlog")
+  , path = require("path")
+  , sha = require("sha")
+  , asyncMap = require("slide").asyncMap
+  , semver = require("semver")
+  , tar = require("./utils/tar.js")
+  , fileCompletion = require("./utils/completion/file-completion.js")
+  , url = require("url")
+  , chownr = require("chownr")
+  , lockFile = require("lockfile")
+  , crypto = require("crypto")
+  , retry = require("retry")
+  , zlib = require("zlib")
+  , chmodr = require("chmodr")
+  , which = require("which")
+  , isGitUrl = require("./utils/is-git-url.js")
+  , pathIsInside = require("path-is-inside")
+
// Usage text shown by `npm help cache` and on bad invocations.
cache.usage = "npm cache add <tarball file>"
            + "\nnpm cache add <folder>"
            + "\nnpm cache add <tarball url>"
            + "\nnpm cache add <git url>"
            + "\nnpm cache add <name>@<version>"
            + "\nnpm cache ls [<path>]"
            + "\nnpm cache clean [<pkg>[@<version>]]"
+
// Tab-completion for `npm cache <subcommand> ...`.
cache.completion = function (opts, cb) {

  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ["add", "ls", "clean"])
  }

  switch (argv[2]) {
    case "clean":
    case "ls":
      // cache and ls are easy, because the completion is
      // what ls_ returns anyway.
      // just get the partial words, minus the last path part
      var p = path.dirname(opts.partialWords.slice(3).join("/"))
      if (p === ".") p = ""
      return ls_(p, 2, cb)
    case "add":
      // Same semantics as install and publish.
      return npm.commands.install.completion(opts, cb)
  }
}
+
// Dispatch the `npm cache` subcommand (with its aliases) to the
// matching implementation.
function cache (args, cb) {
  var cmd = args.shift()
  if (cmd === "rm" || cmd === "clear" || cmd === "clean") {
    return clean(args, cb)
  }
  if (cmd === "list" || cmd === "sl" || cmd === "ls") {
    return ls(args, cb)
  }
  if (cmd === "add") return add(args, cb)
  return cb(new Error("Invalid cache action: "+cmd))
}
+
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
// forceBypass (default true) combined with --force skips the cache.
function read (name, ver, forceBypass, cb) {
  if (typeof cb !== "function") cb = forceBypass, forceBypass = true
  var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
  function c (er, data) {
    if (data) deprCheck(data)
    return cb(er, data)
  }

  if (forceBypass && npm.config.get("force")) {
    log.verbose("using force", "skipping cache")
    return addNamed(name, ver, c)
  }

  readJson(jsonFile, function (er, data) {
    er = needName(er, data)
    er = needVersion(er, data)
    // a missing/invalid cache entry is not fatal -- go to the registry
    if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
    if (er) return addNamed(name, ver, c)
    deprCheck(data)
    c(er, data)
  })
}
+
// npm cache ls [<path>]
// Print cached entries, abbreviating $HOME in the prefix as "~".
function ls (args, cb) {
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var prefix = npm.config.get("cache")
  if (0 === prefix.indexOf(process.env.HOME)) {
    prefix = "~" + prefix.substr(process.env.HOME.length)
  }
  ls_(args, npm.config.get("depth"), function (er, files) {
    console.log(files.map(function (f) {
      return path.join(prefix, f)
    }).join("\n").trim())
    cb(er, files)
  })
}
+
// Calls cb with list of cached pkgs matching show.
// Thin wrapper over file-completion rooted at the cache dir.
function ls_ (req, depth, cb) {
  return fileCompletion(npm.cache, req, depth, cb)
}
+
// npm cache clean [<path>]
// With no path: wipe the cache dir (the "-" entry is kept unless
// --force).  With a path: remove just that subtree.
function clean (args, cb) {
  if (!cb) cb = args, args = []
  if (!args) args = []
  args = args.join("/").split("@").join("/")
  if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
  var f = path.join(npm.cache, path.normalize(args))
  if (f === npm.cache) {
    fs.readdir(npm.cache, function (er, files) {
      if (er) return cb()
      asyncMap( files.filter(function (f) {
                  return npm.config.get("force") || f !== "-"
                }).map(function (f) {
                  return path.join(npm.cache, f)
                })
              , rm, cb )
    })
  } else rm(path.join(npm.cache, path.normalize(args)), cb)
}
+
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
// Optional-argument shuffle: (pkg, cb), (pkg, ver, cb) and
// (pkg, ver, scrub, cb) are all accepted; scrub wipes the cache first.
cache.add = function (pkg, ver, scrub, cb) {
  if (typeof cb !== "function") {
    cb = scrub
    scrub = false
  }
  if (typeof cb !== "function") {
    cb = ver
    ver = null
  }
  if (scrub) {
    return clean([], function (er) {
      if (er) return cb(er)
      add([pkg, ver], cb)
    })
  }
  log.verbose("cache add", [pkg, ver])
  return add([pkg, ver], cb)
}
+
// Normalize the many `cache add` argument shapes, then route to the
// url / git / named / local handlers.
function add (args, cb) {
  // this is hot code.  almost everything passes through here.
  // the args can be any of:
  // ["url"]
  // ["pkg", "version"]
  // ["pkg@version"]
  // ["pkg", "url"]
  // This is tricky, because urls can contain @
  // Also, in some cases we get [name, null] rather
  // than just a single argument.

  var usage = "Usage:\n"
            + "    npm cache add <tarball-url>\n"
            + "    npm cache add <pkg>@<ver>\n"
            + "    npm cache add <tarball>\n"
            + "    npm cache add <folder>\n"
    , name
    , spec

  if (args[1] === undefined) args[1] = null

  // at this point the args length must ==2
  if (args[1] !== null) {
    name = args[0]
    spec = args[1]
  } else if (args.length === 2) {
    spec = args[0]
  }

  log.verbose("cache add", "name=%j spec=%j args=%j", name, spec, args)


  if (!name && !spec) return cb(usage)

  // see if the spec is a url
  // otherwise, treat as name@version
  var p = url.parse(spec) || {}
  log.verbose("parsed url", p)

  // it could be that we got name@http://blah
  // in that case, we will not have a protocol now, but if we
  // split and check, we will.
  if (!name && !p.protocol && spec.indexOf("@") !== -1) {
    spec = spec.split("@")
    name = spec.shift()
    spec = spec.join("@")
    return add([name, spec], cb)
  }

  switch (p.protocol) {
    case "http:":
    case "https:":
      return addRemoteTarball(spec, null, name, cb)

    default:
      if (isGitUrl(p))
        return addRemoteGit(spec, p, name, false, cb)

      // if we have a name and a spec, then try name@spec
      // if not, then try just spec (which may try name@"" if not found)
      if (name) {
        addNamed(name, spec, cb)
      } else {
        addLocal(spec, cb)
      }
  }
}
+
// Download u into tmp and, when a shasum is expected, verify the
// checksum of the downloaded file before reporting success.
function fetchAndShaCheck (u, tmp, shasum, cb) {
  fetch(u, tmp, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }
    // nothing to verify against
    if (!shasum) return cb(null, response)
    sha.check(tmp, shasum, function (er) {
      if (er && er.message) {
        // add original filename for better debuggability
        er.message = er.message + '\n' + 'From:     ' + u
      }
      cb(er, response, shasum)
    })
  })
}
+
// Only have a single download action at once for a given url
// additional calls stack the callbacks.
var inFlightURLs = {}
function addRemoteTarball (u, shasum, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = ""
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null

  // join an in-flight download of the same url instead of re-fetching
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  if (iF.length > 1) return

  // fan the single result out to every stacked callback, after
  // releasing the url lock
  function cb (er, data) {
    if (data) {
      data._from = u
      data._resolved = u
    }
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
    })
  }

  var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")

  lock(u, function (er) {
    if (er) return cb(er)

    log.verbose("addRemoteTarball", [u, shasum])
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      addRemoteTarball_(u, tmp, shasum, done)
    })
  })

  function done (er, resp, shasum) {
    if (er) return cb(er)
    addLocalTarball(tmp, name, shasum, cb)
  }
}
+
// Fetch with retries.  Retries only on missing responses, 408, or 5xx.
function addRemoteTarball_(u, tmp, shasum, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation
    ( { retries: npm.config.get("fetch-retries")
      , factor: npm.config.get("fetch-retry-factor")
      , minTimeout: npm.config.get("fetch-retry-mintimeout")
      , maxTimeout: npm.config.get("fetch-retry-maxtimeout") })

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())
    fetchAndShaCheck(u, tmp, shasum, function (er, response, shasum) {
      // Only retry on 408, 5xx or no `response`.
      var sc = response && response.statusCode
      var statusRetry = !sc || (sc === 408 || sc >= 500)
      if (er && statusRetry && operation.retry(er)) {
        log.info("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er, response, shasum)
    })
  })
}
+
// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
// 3. git clone --mirror u cacheDir
// 4. cd cacheDir && git fetch -a origin
// 5. git archive /tmp/random.tgz
// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
// silent flag is used if this should error quietly
function addRemoteGit (u, parsed, name, silent, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = null

  // coalesce concurrent requests for the same git url
  if (!inFlightURLs[u]) inFlightURLs[u] = []
  var iF = inFlightURLs[u]
  iF.push(cb_)
  if (iF.length > 1) return

  function cb (er, data) {
    unlock(u, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightURLs[u]
    })
  }

  // NOTE(review): outer `co` is shadowed by the `var co` below and
  // never used at this scope; only `p` is actually shared.
  var p, co // cachePath, git-ref we want to check out

  lock(u, function (er) {
    if (er) return cb(er)

    // figure out what we should check out.
    var co = parsed.hash && parsed.hash.substr(1) || "master"
    // git is so tricky!
    // if the path is like ssh://foo:22/some/path then it works, but
    // it needs the ssh://
    // If the path is like ssh://foo:some/path then it works, but
    // only if you remove the ssh://
    var origUrl = u
    u = u.replace(/^git\+/, "")
         .replace(/#.*$/, "")

    // ssh paths that are scp-style urls don't need the ssh://
    if (parsed.pathname.match(/^\/?:/)) {
      u = u.replace(/^ssh:\/\//, "")
    }

    // cache dir name: sanitized url plus a short hash for uniqueness
    var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
    v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v

    log.verbose("addRemoteGit", [u, co])

    p = path.join(npm.config.get("cache"), "_git-remotes", v)

    checkGitDir(p, u, co, origUrl, silent, function(er, data) {
      // normalize modes in the mirror regardless of outcome
      chmodr(p, npm.modes.file, function(erChmod) {
        if (er) return cb(er, data)
        return cb(erChmod, data)
      })
    })
  })
}
+
// Validate an existing mirror dir at p: it must be a directory whose
// origin url matches u; otherwise it is removed and re-cloned.
function checkGitDir (p, u, co, origUrl, silent, cb) {
  fs.stat(p, function (er, s) {
    if (er) return cloneGitRemote(p, u, co, origUrl, silent, cb)
    if (!s.isDirectory()) return rm(p, function (er){
      if (er) return cb(er)
      cloneGitRemote(p, u, co, origUrl, silent, cb)
    })

    var git = npm.config.get("git")
    var args = [ "config", "--get", "remote.origin.url" ]
    var env = gitEnv()

    // check for git
    which(git, function (err) {
      if (err) {
        err.code = "ENOGIT"
        return cb(err)
      }
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        // BUG FIX: was assigned without `var`, leaking an implicit global
        var stdoutTrimmed = (stdout + "\n" + stderr).trim()
        if (er || u !== stdout.trim()) {
          log.warn( "`git config --get remote.origin.url` returned "
                  + "wrong result ("+u+")", stdoutTrimmed )
          return rm(p, function (er){
            if (er) return cb(er)
            cloneGitRemote(p, u, co, origUrl, silent, cb)
          })
        }
        log.verbose("git remote.origin.url", stdoutTrimmed)
        archiveGitRemote(p, u, co, origUrl, cb)
      })
    })
  })
}
+
// Mirror-clone u into p, then archive the requested ref.
// `silent` downgrades clone failures from error-level to verbose logs.
function cloneGitRemote (p, u, co, origUrl, silent, cb) {
  mkdir(p, function (er) {
    if (er) return cb(er)

    var git = npm.config.get("git")
    var args = [ "clone", "--mirror", u, p ]
    var env = gitEnv()

    // check for git
    which(git, function (err) {
      if (err) {
        err.code = "ENOGIT"
        return cb(err)
      }
      exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
        stdout = (stdout + "\n" + stderr).trim()
        if (er) {
          if (silent) {
            log.verbose("git clone " + u, stdout)
          } else {
            log.error("git clone " + u, stdout)
          }
          return cb(er)
        }
        log.verbose("git clone " + u, stdout)
        archiveGitRemote(p, u, co, origUrl, cb)
      })
    })
  })
}
+
// Bring the mirror at p up to date, resolve the requested ref to a
// commit, then `git archive` it into a gzipped tarball and add that
// tarball to the cache.
function archiveGitRemote (p, u, co, origUrl, cb) {
  var git = npm.config.get("git")
  var archive = [ "fetch", "-a", "origin" ]
  var resolve = [ "rev-list", "-n1", co ]
  var env = gitEnv()

  var errState = null
  var n = 0
  var resolved = null
  var tmp

  exec(git, archive, {cwd: p, env: env}, function (er, stdout, stderr) {
    stdout = (stdout + "\n" + stderr).trim()
    if (er) {
      log.error("git fetch -a origin ("+u+")", stdout)
      return cb(er)
    }
    log.verbose("git fetch -a origin ("+u+")", stdout)
    tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
    resolveHead()
  })

  // pin the ref to an exact commit so _resolved is reproducible
  function resolveHead () {
    exec(git, resolve, {cwd: p, env: env}, function (er, stdout, stderr) {
      stdout = (stdout + "\n" + stderr).trim()
      if (er) {
        log.error("Failed resolving git HEAD (" + u + ")", stderr)
        return cb(er)
      }
      log.verbose("git rev-list -n1 " + co, stdout)
      var parsed = url.parse(origUrl)
      parsed.hash = stdout
      resolved = url.format(parsed)

      // https://github.com/isaacs/npm/issues/3224
      // node incorrectly sticks a / at the start of the path
      // We know that the host won't change, so split and detect this
      var spo = origUrl.split(parsed.host)
      var spr = resolved.split(parsed.host)
      if (spo[1].charAt(0) === ':' && spr[1].charAt(0) === '/')
        spr[1] = spr[1].slice(1)
      resolved = spr.join(parsed.host)

      log.verbose('resolved git url', resolved)
      next()
    })
  }

  // stream `git archive <ref>` through gzip into the tmp tarball
  function next () {
    mkdir(path.dirname(tmp), function (er) {
      if (er) return cb(er)
      var gzip = zlib.createGzip({ level: 9 })
      var git = npm.config.get("git")
      var args = ["archive", co, "--format=tar", "--prefix=package/"]
      var out = fs.createWriteStream(tmp)
      var env = gitEnv()
      cb = once(cb)
      var cp = spawn(git, args, { env: env, cwd: p })
      cp.on("error", cb)
      cp.stderr.on("data", function(chunk) {
        log.silly(chunk.toString(), "git archive")
      })

      cp.stdout.pipe(gzip).pipe(out).on("close", function() {
        addLocalTarball(tmp, function(er, data) {
          if (data) data._resolved = resolved
          cb(er, data)
        })
      })
    })
  }
}
+
var gitEnv_
function gitEnv () {
  // git responds to env vars in some weird ways in post-receive hooks,
  // so strip GIT_* variables -- except a small allowlist -- before
  // spawning git.  The result is computed once and cached.
  if (gitEnv_) return gitEnv_
  var keep = ["GIT_PROXY_COMMAND", "GIT_SSH", "GIT_SSL_NO_VERIFY"]
  gitEnv_ = Object.keys(process.env).reduce(function (env, name) {
    var isGitVar = /^GIT/.test(name)
    if (!isGitVar || keep.indexOf(name) !== -1) {
      env[name] = process.env[name]
    }
    return env
  }, {})
  return gitEnv_
}
+
+
// only have one request in flight for a given
// name@blah thing.
var inFlightNames = {}
function addNamed (name, x, data, cb_) {
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.verbose("addNamed", [name, x])

  // coalesce concurrent requests for the same name@spec
  var k = name + "@" + x
  if (!inFlightNames[k]) inFlightNames[k] = []
  var iF = inFlightNames[k]
  iF.push(cb_)
  if (iF.length > 1) return

  // fan the single result out to all stacked callbacks, post-unlock
  function cb (er, data) {
    if (data && !data._fromGithub) data._from = k
    unlock(k, function () {
      var c
      while (c = iF.shift()) c(er, data)
      delete inFlightNames[k]
    })
  }

  log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
  lock(k, function (er, fd) {
    if (er) return cb(er)

    // exact version -> addNameVersion, range -> addNameRange,
    // anything else is treated as a dist-tag
    var fn = ( semver.valid(x, true) ? addNameVersion
             : semver.validRange(x, true) ? addNameRange
             : addNameTag
             )
    fn(name, x, data, cb)
  })
}
+
// Resolve a dist-tag (default: the configured "tag") to a concrete
// version and add that.  On failure, a "user/project" looking tag is
// retried as a github shorthand.
function addNameTag (name, tag, data, cb_) {
  if (typeof cb_ !== "function") cb_ = data, data = null
  log.info("addNameTag", [name, tag])
  var explicit = true
  if (!tag) {
    explicit = false
    tag = npm.config.get("tag")
  }

  function cb(er, data) {
    // might be username/project
    // in that case, try it as a github url.
    if (er && tag.split("/").length === 2) {
      return maybeGithub(tag, name, er, cb_)
    }
    return cb_(er, data)
  }

  registry.get(name, function (er, data, json, response) {
    if (er) return cb(er)
    engineFilter(data)
    if (data["dist-tags"] && data["dist-tags"][tag]
        && data.versions[data["dist-tags"][tag]]) {
      var ver = data["dist-tags"][tag]
      return addNamed(name, ver, data.versions[ver], cb)
    }
    // an implicit tag falls back to "any version" when the tag is gone
    if (!explicit && Object.keys(data.versions).length) {
      return addNamed(name, "*", data, cb)
    }

    er = installTargetsError(tag, data)
    return cb(er)
  })
}
+
+
// Delete from data.versions any version whose `engines` requirements
// are not satisfied by the current node/npm.  Only applies when
// engine-strict is set globally or the version sets engineStrict.
// Mutates `data` in place; no-op under --force or without node-version.
function engineFilter (data) {
  var npmv = npm.version
    , nodev = npm.config.get("node-version")
    , strict = npm.config.get("engine-strict")

  if (!nodev || npm.config.get("force")) return data

  Object.keys(data.versions || {}).forEach(function (v) {
    var eng = data.versions[v].engines
    if (!eng) return
    if (!strict && !data.versions[v].engineStrict) return
    if (eng.node && !semver.satisfies(nodev, eng.node, true)
        || eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
      delete data.versions[v]
    }
  })
}
+
// Resolve a semver range to a concrete version: prefer the configured
// dist-tag when it satisfies the range, else the max satisfying version.
function addNameRange (name, range, data, cb) {
  if (typeof cb !== "function") cb = data, data = null

  range = semver.validRange(range, true)
  if (range === null) return cb(new Error(
    "Invalid version range: "+range))

  log.silly("addNameRange", {name:name, range:range, hasData:!!data})

  if (data) return next()
  registry.get(name, function (er, d, json, response) {
    if (er) return cb(er)
    data = d
    next()
  })

  function next () {
    log.silly( "addNameRange", "number 2"
             , {name:name, range:range, hasData:!!data})
    engineFilter(data)

    log.silly("addNameRange", "versions"
             , [data.name, Object.keys(data.versions || {})])

    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged
        && data.versions[tagged]
        && semver.satisfies(tagged, range, true)) {
      return addNamed(name, tagged, data.versions[tagged], cb)
    }

    // find the max satisfying version.
    var versions = Object.keys(data.versions || {})
    var ms = semver.maxSatisfying(versions, range, true)
    if (!ms) {
      return cb(installTargetsError(range, data))
    }

    // if we don't have a registry connection, try to see if
    // there's a cached copy that will be ok.
    addNamed(name, ms, data.versions[ms], cb)
  }
}
+
// Build the ETARGET error reported when no version satisfies the
// requested tag/range, listing what the user could have asked for.
function installTargetsError (requested, data) {
  var versions = data.versions || {}
  var targets = Object.keys(data["dist-tags"]).filter(function (tag) {
    return versions.hasOwnProperty(tag)
  })
  targets = targets.concat(Object.keys(versions))

  requested = data.name + (requested ? "@'" + requested + "'" : "")

  var hint
  if (targets.length) {
    hint = "Valid install targets:\n" + JSON.stringify(targets) + "\n"
  } else {
    hint = "No valid targets found.\n"
         + "Perhaps not compatible with your version of node?"
  }

  var er = new Error( "No compatible version found: "
                  + requested + "\n" + hint)
  er.code = "ETARGET"
  return er
}
+
// Add a specific name@version: serve it from the cache when a valid
// cached copy exists, otherwise fetch the dist tarball (matching the
// registry's protocol where the hostnames agree).
function addNameVersion (name, v, data, cb) {
  if (typeof cb !== "function") cb = data, data = null

  var ver = semver.valid(v, true)
  if (!ver) return cb(new Error("Invalid version: "+v))

  var response

  if (data) {
    response = null
    return next()
  }
  registry.get(name + "/" + ver, function (er, d, json, resp) {
    if (er) return cb(er)
    data = d
    response = resp
    next()
  })

  function next () {
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    // fresh registry data (non-304) or --force means download anew
    if ((response && response.statusCode !== 304) || npm.config.get("force")) {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    var pkgroot = path.join(npm.cache, name, ver)
    var pkgtgz = path.join(pkgroot, "package.tgz")
    var pkgjson = path.join(pkgroot, "package", "package.json")
    fs.stat(pkgtgz, function (er, s) {
      if (!er) {
        readJson(pkgjson, function (er, data) {
          er = needName(er, data)
          er = needVersion(er, data)
          if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR")
            return cb(er)
          if (er) return fetchit()
          return cb(null, data)
        })
      } else return fetchit()
    })

    function fetchit () {
      if (!npm.config.get("registry")) {
        return cb(new Error("Cannot fetch: "+dist.tarball))
      }

      // use the same protocol as the registry.
      // https registry --> https tarballs, but
      // only if they're the same hostname, or else
      // detached tarballs may not work.
      var tb = url.parse(dist.tarball)
      var rp = url.parse(npm.config.get("registry"))
      if (tb.hostname === rp.hostname
          && tb.protocol !== rp.protocol) {
        tb.protocol = url.parse(npm.config.get("registry")).protocol
        delete tb.href
      }
      tb = url.format(tb)

      // only add non-shasum'ed packages if --forced.
      // only ancient things would lack this for good reasons nowadays.
      if (!dist.shasum && !npm.config.get("force")) {
        return cb(new Error("package lacks shasum: " + data._id))
      }
      return addRemoteTarball( tb
                             , dist.shasum
                             , name+"-"+ver
                             , cb )
    }
  }
}
+
// Add a package from the local filesystem: either a directory or a
// tarball.  A lock on the path is held for the duration; if the path
// turns out not to be local, plain words are retried as registry names
// and "user/project" shapes are retried as github repos.
//
// p    - local path (file or directory), or something that merely looks
//        like one
// name - optional package name hint
// cb_  - cb(er, data)
function addLocal (p, name, cb_) {
  // name is optional: support the (p, cb) call shape.
  if (typeof cb_ !== "function") cb_ = name, name = ""

  // Wrap the real callback so the lock is always released first.
  function cb (er, data) {
    unlock(p, function () {
      if (er) {
        // if it doesn't have a / in it, it might be a
        // remote thing.
        if (p.indexOf("/") === -1 && p.charAt(0) !== "."
           && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
          return addNamed(p, "", cb_)
        }
        log.error("addLocal", "Could not install %s", p)
        return cb_(er)
      }
      // Record provenance, unless it was resolved via github (which
      // sets its own _from).
      if (data && !data._fromGithub) data._from = p
      return cb_(er, data)
    })
  }

  lock(p, function (er) {
    if (er) return cb(er)
    // figure out if this is a folder or file.
    fs.stat(p, function (er, s) {
      if (er) {
        // might be username/project
        // in that case, try it as a github url.
        if (p.split("/").length === 2) {
          return maybeGithub(p, name, er, cb)
        }
        return cb(er)
      }
      if (s.isDirectory()) addLocalDirectory(p, name, cb)
      else addLocalTarball(p, name, cb)
    })
  })
}
+
// Fall back to treating "user/project" as a github repository.  First
// tries the anonymous git:// URL; if that fails, retries over ssh
// (git+ssh://git@github.com:user/project).  If both attempts fail, the
// ORIGINAL stat error `er` is reported, not the git errors.
function maybeGithub (p, name, er, cb) {
  var u = "git://github.com/" + p
    , up = url.parse(u)
  log.info("maybeGithub", "Attempting %s from %s", p, u)

  return addRemoteGit(u, up, name, true, function (er2, data) {
    if (er2) {
      var upriv = "git+ssh://git@github.com:" + p
        , uppriv = url.parse(upriv)

      log.info("maybeGithub", "Attempting %s from %s", p, upriv)

      // BUG FIX: previously called as (upriv, uppriv, false, name, ...),
      // which swapped the `name` and `silent` arguments relative to the
      // git:// attempt above; pass them in the same order as that call.
      return addRemoteGit(upriv, uppriv, name, false, function (er3, data) {
        if (er3) return cb(er)
        success(upriv, data)
      })
    }
    success(u, data)
  })

  // Tag the data with its github origin and report success.
  function success (u, data) {
    data._from = u
    data._fromGithub = true
    return cb(null, data)
  }
}
+
// Add a tarball from the local filesystem.  Depending on where the file
// lives it is handled in place (npm.tmp / npm.cache) or first copied
// into a uniquely-named temp dir and added from there.
//
// p      - path to the tarball
// name   - optional package name hint
// shasum - optional sha1, already read/verified by the caller
// cb_    - cb(er, data)
function addLocalTarball (p, name, shasum, cb_) {
  // shasum and name are both optional; shift args accordingly.
  if (typeof cb_ !== "function") cb_ = shasum, shasum = null
  if (typeof cb_ !== "function") cb_ = name, name = ""
  // if it's a tar, and not in place,
  // then unzip to .tmp, add the tmp folder, and clean up tmp
  if (pathIsInside(p, npm.tmp))
    return addTmpTarball(p, name, shasum, cb_)

  if (pathIsInside(p, npm.cache)) {
    if (path.basename(p) !== "package.tgz") return cb_(new Error(
      "Not a valid cache tarball name: "+p))
    return addPlacedTarball(p, name, shasum, cb_)
  }

  // Record where the package was resolved from before reporting back.
  function cb (er, data) {
    if (data) data._resolved = p
    return cb_(er, data)
  }

  // just copy it over and then add the temp tarball file.
  var tmp = path.join(npm.tmp, name + Date.now()
                             + "-" + Math.random(), "tmp.tgz")
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var from = fs.createReadStream(p)
      , to = fs.createWriteStream(tmp)
      , errState = null
    // Report only the first stream error; later ones are ignored, and
    // the "close" handler below is disarmed via errState.
    function errHandler (er) {
      if (errState) return
      return cb(errState = er)
    }
    from.on("error", errHandler)
    to.on("error", errHandler)
    to.on("close", function () {
      if (errState) return
      log.verbose("chmod", tmp, npm.modes.file.toString(8))
      fs.chmod(tmp, npm.modes.file, function (er) {
        if (er) return cb(er)
        addTmpTarball(tmp, name, shasum, cb)
      })
    })
    from.pipe(to)
  })
}
+
// to maintain the cache dir's permissions consistently.
var cacheStat = null

// Stat the cache directory, memoizing the result in `cacheStat`.
// Creates the directory (via makeCacheDir) if it does not exist yet.
function getCacheStat (cb) {
  // Serve the memoized stat when we already have one.
  if (cacheStat) return cb(null, cacheStat)

  fs.stat(npm.cache, function (er, st) {
    // No cache dir yet: create it (which also primes cacheStat).
    if (er) return makeCacheDir(cb)

    if (!st.isDirectory()) {
      log.error("getCacheStat", "invalid cache dir %j", npm.cache)
      return cb(er)
    }

    cacheStat = st
    return cb(null, cacheStat)
  })
}
+
// Create the cache directory, choosing an owner for it.
// On platforms without process.getuid (windows), just mkdir.  When
// running as root under sudo, prefer SUDO_UID/SUDO_GID; if still root
// and HOME is set, inherit the ownership of $HOME instead.
function makeCacheDir (cb) {
  if (!process.getuid) return mkdir(npm.cache, cb)

  var uid = +process.getuid()
    , gid = +process.getgid()

  if (uid === 0) {
    // running via sudo: act on behalf of the invoking user.
    if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
  }
  if (uid !== 0 || !process.env.HOME) {
    cacheStat = {uid: uid, gid: gid}
    return mkdir(npm.cache, afterMkdir)
  }

  // Still root with a HOME: mirror the ownership of $HOME.
  fs.stat(process.env.HOME, function (er, st) {
    if (er) {
      log.error("makeCacheDir", "homeless?")
      return cb(er)
    }
    cacheStat = st
    log.silly("makeCacheDir", "cache dir uid, gid", [st.uid, st.gid])
    return mkdir(npm.cache, afterMkdir)
  })

  function afterMkdir (er, made) {
    // mkdir failed, or no usable ownership info: stop here.
    if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
      return cb(er, cacheStat)
    }

    // `made` is the topmost directory actually created; nothing to
    // chown when the directory already existed.
    if (!made) return cb(er, cacheStat)

    // ensure that the ownership is correct.
    chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
      return cb(er, cacheStat)
    })
  }
}
+
+
+
+
// Add a tarball that already sits at its final cache location.  The
// cache ownership is resolved first so the unpacked tree can be chowned
// to match.
function addPlacedTarball (p, name, shasum, cb) {
  // Optional name: shift args when invoked without it.
  if (!cb) {
    cb = name
    name = ""
  }
  getCacheStat(function (er, stat) {
    if (er) return cb(er)
    return addPlacedTarball_(p, name, stat.uid, stat.gid, shasum, cb)
  })
}
+
+// Resolved sum is the shasum from the registry dist object, but
+// *not* necessarily the shasum of this tarball, because for stupid
+// historical reasons, npm re-packs each package an extra time through
+// a temp directory, so all installed packages are actually built with
+// *this* version of npm, on this machine.
+//
+// Once upon a time, this meant that we could change package formats
+// around and fix junk that might be added by incompatible tar
+// implementations.  Then, for a while, it was a way to correct bs
+// added by bugs in our own tar implementation.  Now, it's just
+// garbage, but cleaning it up is a pain, and likely to cause issues
+// if anything is overlooked, so it's not high priority.
+//
+// If you're bored, and looking to make npm go faster, and you've
+// already made it this far in this file, here's a better methodology:
+//
+// cache.add should really be cache.place.  That is, it should take
+// a set of arguments like it does now, but then also a destination
+// folder.
+//
+// cache.add('foo@bar', '/path/node_modules/foo', cb)
+//
+// 1. Resolve 'foo@bar' to some specific:
+//   - git url
+//   - local folder
+//   - local tarball
+//   - tarball url
+// 2. If resolved through the registry, then pick up the dist.shasum
+// along the way.
+// 3. Acquire request() stream fetching bytes: FETCH
+// 4. FETCH.pipe(tar unpack stream to dest)
+// 5. FETCH.pipe(shasum generator)
+// When the tar and shasum streams both finish, make sure that the
+// shasum matches dist.shasum, and if not, clean up and bail.
+//
+// publish(cb)
+//
+// 1. read package.json
+// 2. get root package object (for rev, and versions)
+// 3. update root package doc with version info
+// 4. remove _attachments object
+// 5. remove versions object
+// 6. jsonify, remove last }
+// 7. get stream: registry.put(/package)
+// 8. write trailing-}-less JSON
+// 9. write "_attachments":
+// 10. JSON.stringify(attachments), remove trailing }
+// 11. Write start of attachments (stubs)
+// 12. JSON(filename)+':{"type":"application/octet-stream","data":"'
+// 13. acquire tar packing stream, PACK
+// 14. PACK.pipe(PUT)
+// 15. PACK.pipe(shasum generator)
+// 16. when PACK finishes, get shasum
+// 17. PUT.write('"}},') (finish _attachments)
+// 18. update "versions" object with current package version
+// (including dist.shasum and dist.tarball)
+// 19. write '"versions":' + JSON(versions)
+// 20. write '}}' (versions, close main doc)
+
// Worker for addPlacedTarball once the cache ownership (uid/gid) is
// known.  The tarball is already at .cache/name/ver/package.tgz; unpack
// it next to itself, read the package.json, record the tarball's actual
// shasum in data.dist, fix permissions, and call back with the json.
//
// resolvedSum is the registry's dist shasum, which is NOT necessarily
// the shasum of this re-packed tarball (see the long comment above).
function addPlacedTarball_ (p, name, uid, gid, resolvedSum, cb) {
  // now we know it's in place already as .cache/name/ver/package.tgz
  // unpack to .cache/name/ver/package/, read the package.json,
  // and fire cb with the json data.
  var target = path.dirname(p)
    , folder = path.join(target, "package")

  lock(folder, function (er) {
    if (er) return cb(er)
    rmUnpack()
  })

  // Remove any stale unpacked copy before extracting a fresh one.  The
  // lock is released as soon as the removal finishes.
  function rmUnpack () {
    rm(folder, function (er) {
      unlock(folder, function () {
        if (er) {
          log.error("addPlacedTarball", "Could not remove %j", folder)
          return cb(er)
        }
        thenUnpack()
      })
    })
  }

  function thenUnpack () {
    tar.unpack(p, folder, null, null, uid, gid, function (er) {
      if (er) {
        log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
        return cb(er)
      }
      // calculate the sha of the file that we just unpacked.
      // this is so that the data is available when publishing.
      sha.get(p, function (er, shasum) {
        if (er) {
          log.error("addPlacedTarball", "shasum fail", p)
          return cb(er)
        }
        readJson(path.join(folder, "package.json"), function (er, data) {
          er = needName(er, data)
          er = needVersion(er, data)
          if (er) {
            log.error("addPlacedTarball", "Couldn't read json in %j"
                     , folder)
            return cb(er)
          }

          data.dist = data.dist || {}
          data.dist.shasum = shasum
          deprCheck(data)
          // chmod the tarball, then chown it; chown is skipped on
          // windows or when uid/gid are not sane integers.
          asyncMap([p], function (f, cb) {
            log.verbose("chmod", f, npm.modes.file.toString(8))
            fs.chmod(f, npm.modes.file, cb)
          }, function (f, cb) {
            if (process.platform === "win32") {
              log.silly("chown", "skipping for windows", f)
              cb()
            } else if (typeof uid === "number"
                && typeof gid === "number"
                && parseInt(uid, 10) === uid
                && parseInt(gid, 10) === gid) {
              log.verbose("chown", f, [uid, gid])
              fs.chown(f, uid, gid, cb)
            } else {
              log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
              cb()
            }
          }, function (er) {
            cb(er, data)
          })
        })
      })
    })
  }
}
+
+// At this point, if shasum is set, it's something that we've already
+// read and checked.  Just stashing it in the data at this point.
+function addLocalDirectory (p, name, shasum, cb) {
+  if (typeof cb !== "function") cb = shasum, shasum = ""
+  if (typeof cb !== "function") cb = name, name = ""
+  // if it's a folder, then read the package.json,
+  // tar it to the proper place, and add the cache tar
+  if (pathIsInside(p, npm.cache)) return cb(new Error(
+    "Adding a cache directory to the cache will make the world implode."))
+  readJson(path.join(p, "package.json"), false, function (er, data) {
+    er = needName(er, data)
+    er = needVersion(er, data)
+    if (er) return cb(er)
+    deprCheck(data)
+    var random = Date.now() + "-" + Math.random()
+      , tmp = path.join(npm.tmp, random)
+      , tmptgz = path.resolve(tmp, "tmp.tgz")
+      , placed = path.resolve( npm.cache, data.name
+                             , data.version, "package.tgz" )
+      , placeDirect = path.basename(p) === "package"
+      , tgz = placeDirect ? placed : tmptgz
+    getCacheStat(function (er, cs) {
+      mkdir(path.dirname(tgz), function (er, made) {
+        if (er) return cb(er)
+
+        var fancy = !pathIsInside(p, npm.tmp)
+                    && !pathIsInside(p, npm.cache)
+        tar.pack(tgz, p, data, fancy, function (er) {
+          if (er) {
+            log.error( "addLocalDirectory", "Could not pack %j to %j"
+                     , p, tgz )
+            return cb(er)
+          }
+
+          // if we don't get a cache stat, or if the gid/uid is not
+          // a number, then just move on.  chown would fail anyway.
+          if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()
+
+          chownr(made || tgz, cs.uid, cs.gid, function (er) {
+            if (er) return cb(er)
+            addLocalTarball(tgz, name, shasum, cb)
+          })
+        })
+      })
+    })
+  })
+}
+
// Unpack a tarball that lives in the tmp area, then cache the resulting
// "package" directory via addLocalDirectory.
function addTmpTarball (tgz, name, shasum, cb) {
  // Optional name: shift args when invoked without it.
  if (!cb) {
    cb = name
    name = ""
  }
  getCacheStat(function (er, stat) {
    if (er) return cb(er)
    var pkgDir = path.resolve(path.dirname(tgz), "package")
    tar.unpack(tgz, pkgDir, null, null, stat.uid, stat.gid, function (er) {
      if (er) return cb(er)
      addLocalDirectory(pkgDir, name, shasum, cb)
    })
  })
}
+
// Unpack a cached pkg@ver tarball into unpackTarget, unbuilding anything
// already there first.  dMode, fMode, uid and gid are all optional and
// default to null when the callback arrives earlier in the list.
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  // Optional trailing args: whichever slot holds the function is cb.
  if (typeof cb !== "function") { cb = gid; gid = null }
  if (typeof cb !== "function") { cb = uid; uid = null }
  if (typeof cb !== "function") { cb = fMode; fMode = null }
  if (typeof cb !== "function") { cb = dMode; dMode = null }

  read(pkg, ver, false, function (er, data) {
    if (er) {
      log.error("unpack", "Could not read data for %s", pkg + "@" + ver)
      return cb(er)
    }
    npm.commands.unbuild([unpackTarget], true, function (er) {
      if (er) return cb(er)
      var cached = path.join(npm.cache, pkg, ver, "package.tgz")
      tar.unpack(cached, unpackTarget, dMode, fMode, uid, gid, cb)
    })
  })
}
+
// Remember and warn (once per package _id) about deprecated packages.
// The deprecation message is sticky: once seen for an _id, it is
// re-applied to any later data object carrying the same _id.
var deprecated = {}
var deprWarned = {}
function deprCheck (data) {
  // Re-apply a previously seen deprecation message.
  if (deprecated[data._id]) data.deprecated = deprecated[data._id]
  if (!data.deprecated) return
  deprecated[data._id] = data.deprecated
  if (deprWarned[data._id]) return
  deprWarned[data._id] = true
  log.warn("deprecated", "%s: %s", data._id, data.deprecated)
}
+
// Map an arbitrary URL or path to a lock file path inside the cache
// dir: a short sha1 prefix for uniqueness plus a sanitized tail of the
// input for human readability.
function lockFileName (u) {
  var safe = u.replace(/[^a-zA-Z0-9]+/g, "-").replace(/^-+|-+$/g, "")
  var hash = crypto.createHash("sha1").update(u).digest("hex").substr(0, 8)
  var tail = safe.substr(-32)
  log.silly("lockFile", hash + "-" + tail, u)
  return path.resolve(npm.config.get("cache"), hash + "-" + tail + ".lock")
}
+
// Lock files this process currently owns, so unlock() only removes
// locks we actually acquired.
var myLocks = {}

// Acquire the cache lock for `u`.  The cache directory must exist (it
// is created on demand) since the lock file lives inside it.
function lock (u, cb) {
  getCacheStat(function (er) {
    if (er) return cb(er)
    var lf = lockFileName(u)
    var opts = { stale: npm.config.get("cache-lock-stale")
               , retries: npm.config.get("cache-lock-retries")
               , wait: npm.config.get("cache-lock-wait") }
    log.verbose("lock", u, lf)
    lockFile.lock(lf, opts, function (er) {
      if (!er) myLocks[lf] = true
      cb(er)
    })
  })
}
+
// Release the cache lock for `u`, but only if this process holds it.
function unlock (u, cb) {
  var lf = lockFileName(u)
  // Not ours: do nothing, but stay async.
  if (!myLocks[lf]) return process.nextTick(cb)
  myLocks[lf] = false
  lockFile.unlock(lockFileName(u), cb)
}
+
// Produce an error when package data lacks a name; any pre-existing
// error passes through unchanged, and a missing data object is fine.
function needName(er, data) {
  if (er) return er
  if (data && !data.name) return new Error("No name provided")
  return null
}
+
// Produce an error when package data lacks a version; any pre-existing
// error passes through unchanged, and a missing data object is fine.
function needVersion(er, data) {
  if (er) return er
  if (data && !data.version) return new Error("No version provided")
  return null
}

http://git-wip-us.apache.org/repos/asf/incubator-cmda/blob/a9a83675/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/completion.js
----------------------------------------------------------------------
diff --git a/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/completion.js b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/completion.js
new file mode 100644
index 0000000..0e56d04
--- /dev/null
+++ b/ApacheCMDA_Backend_1.0/project/target/node-modules/webjars/npm/lib/completion.js
@@ -0,0 +1,253 @@

module.exports = completion

// Usage text for `npm help completion`: the generated script is meant
// to be appended to a shell rc file or sourced directly.
completion.usage = "npm completion >> ~/.bashrc\n"
                 + "npm completion >> ~/.zshrc\n"
                 + "source <(npm completion)"
+
+var npm = require("./npm.js")
+  , npmconf = require("npmconf")
+  , configDefs = npmconf.defs
+  , configTypes = configDefs.types
+  , shorthands = configDefs.shorthands
+  , nopt = require("nopt")
+  , configNames = Object.keys(configTypes).filter(function (e) {
+      return e.charAt(0) !== "_"
+    })
+  , shorthandNames = Object.keys(shorthands)
+  , allConfs = configNames.concat(shorthandNames)
+  , once = require("once")
+
+
// Completion for the `completion` command itself: offer whichever shell
// rc files exist in $HOME, and at word 2 prefix them with the ">>"
// redirection.  Beyond word 3 there is nothing left to complete.
completion.completion = function (opts, cb) {
  if (opts.w > 3) return cb()

  var fs = require("graceful-fs")
    , path = require("path")
    , bashExists = null
    , zshExists = null
  // Stat both rc files in parallel; next() only proceeds once both
  // results are in.  (Removed the unused `bashProfExists` local.)
  fs.stat(path.resolve(process.env.HOME, ".bashrc"), function (er) {
    bashExists = !er
    next()
  })
  fs.stat(path.resolve(process.env.HOME, ".zshrc"), function (er) {
    zshExists = !er
    next()
  })
  function next () {
    if (zshExists === null || bashExists === null) return
    var out = []
    if (zshExists) out.push("~/.zshrc")
    if (bashExists) out.push("~/.bashrc")
    if (opts.w === 2) out = out.map(function (m) {
      return [">>", m]
    })
    cb(null, out)
  }
}
+
// Implements `npm completion`.  With no COMP_* variables in the env it
// prints the shell completion script; when invoked BY that script it
// computes completions for the current command line and prints them on
// stdout (one per line, via wrapCb).
function completion (args, cb) {
  if (process.platform === "win32") {
    var e = new Error("npm completion not supported on windows")
    e.code = "ENOTSUP"
    e.errno = require("constants").ENOTSUP
    return cb(e)
  }

  // if the COMP_* isn't in the env, then just dump the script.
  if (process.env.COMP_CWORD === undefined
    ||process.env.COMP_LINE === undefined
    ||process.env.COMP_POINT === undefined
    ) return dumpScript(cb)

  // Debug output; stderr is presumably ignored by the shell's
  // completion machinery, which reads only stdout.
  console.error(process.env.COMP_CWORD)
  console.error(process.env.COMP_LINE)
  console.error(process.env.COMP_POINT)

  //console.log("abracadabrasauce\nabracad cat monger")
  //if (Math.random() * 3 < 1) console.log("man\\ bear\\ pig")
  //else if (Math.random() * 3 < 1)
  //  console.log("porkchop\\ sandwiches\nporkman")
  //else console.log("encephylophagy")

  // get the partial line and partial word,
  // if the point isn't at the end.
  // ie, tabbing at: npm foo b|ar
  var w = +process.env.COMP_CWORD
    , words = args.map(unescape)
    , word = words[w]
    , line = process.env.COMP_LINE
    , point = +process.env.COMP_POINT
    , lineLength = line.length
    , partialLine = line.substr(0, point)
    , partialWords = words.slice(0, w)

  // figure out where in that last word the point is.
  var partialWord = args[w]
    , i = partialWord.length
  while (partialWord.substr(0, i) !== partialLine.substr(-1*i) && i > 0) {
    i --
  }
  partialWord = unescape(partialWord.substr(0, i))
  partialWords.push(partialWord)

  // Everything downstream completion handlers might need about the
  // current command line.
  var opts = { words : words
             , w : w
             , word : word
             , line : line
             , lineLength : line.length
             , point : point
             , partialLine : partialLine
             , partialWords : partialWords
             , partialWord : partialWord
             , raw: args
             }

  // From here on, cb escapes/filters/prints the completions itself.
  cb = wrapCb(cb, opts)

  console.error(opts)

  // "--" ends option parsing; only complete configs before it.
  if (partialWords.slice(0, -1).indexOf("--") === -1) {
    if (word.charAt(0) === "-") return configCompl(opts, cb)
    if (words[w - 1]
        && words[w - 1].charAt(0) === "-"
        && !isFlag(words[w - 1])) {
      // awaiting a value for a non-bool config.
      // don't even try to do this for now
      console.error("configValueCompl")
      return configValueCompl(opts, cb)
    }
  }

  // try to find the npm command.
  // it's the first thing after all the configs.
  // take a little shortcut and use npm's arg parsing logic.
  // don't have to worry about the last arg being implicitly
  // boolean'ed, since the last block will catch that.
  var parsed = opts.conf =
    nopt(configTypes, shorthands, partialWords.slice(0, -1), 0)
  // check if there's a command already.
  console.error(parsed)
  var cmd = parsed.argv.remain[1]
  if (!cmd) return cmdCompl(opts, cb)

  // Apply the parsed configs so the command's completion handler sees
  // the same settings the real command would.
  Object.keys(parsed).forEach(function (k) {
    npm.config.set(k, parsed[k])
  })

  // at this point, if words[1] is some kind of npm command,
  // then complete on it.
  // otherwise, do nothing
  cmd = npm.commands[cmd]
  if (cmd && cmd.completion) return cmd.completion(opts, cb)

  // nothing to do.
  cb()
}
+
// Print utils/completion.sh to stdout with its shebang line stripped,
// so the output can be sourced or appended to a shell rc file.
function dumpScript (cb) {
  var fs = require("graceful-fs")
    , path = require("path")
    , p = path.resolve(__dirname, "utils/completion.sh")

  // The Darwin patch below results in callbacks first for the write and then
  // for the error handler, so make sure we only call our callback once.
  cb = once(cb)

  fs.readFile(p, "utf8", function (er, d) {
    if (er) return cb(er)
    // strip the shebang so the output is safe to `source`
    d = d.replace(/^\#\!.*?\n/, "")

    process.stdout.write(d, function (n) { cb() })
    process.stdout.on("error", function (er) {
      // Darwin is a real dick sometimes.
      //
      // This is necessary because the "source" or "." program in
      // bash on OS X closes its file argument before reading
      // from it, meaning that you get exactly 1 write, which will
      // work most of the time, and will always raise an EPIPE.
      //
      // Really, one should not be tossing away EPIPE errors, or any
      // errors, so casually.  But, without this, `. <(npm completion)`
      // can never ever work on OS X.
      if (er.errno === "EPIPE") er = null
      cb(er)
    })

  })
}
+
// Reverse shell-style escaping: a double-quoted word has its
// surrounding quotes stripped; otherwise backslash-escaped spaces
// become plain spaces.
function unescape (w) {
  if (w.charAt(0) === "\"") return w.replace(/^"|"$/g, "")
  return w.replace(/\\ /g, " ")
}
+
// Wrap a completion word in double quotes when it contains whitespace,
// so the shell treats it as a single token.
function escape (w) {
  if (/\s+/.test(w)) return "\"" + w + "\""
  return w
}
+
+// The command should respond with an array.  Loop over that,
+// wrapping quotes around any that have spaces, and writing
+// them to stdout.  Use console.log, not the outfd config.
+// If any of the items are arrays, then join them with a space.
+// Ie, returning ["a", "b c", ["d", "e"]] would allow it to expand
+// to: "a", "b c", or "d" "e"
function wrapCb (cb, opts) {
  return function (er, compls) {
    // Normalize to an array; a falsy result means no completions.
    if (!Array.isArray(compls)) compls = compls ? [compls] : []
    // Escape each candidate; arrays of words join into one candidate.
    compls = compls.map(function (c) {
      return Array.isArray(c) ? c.map(escape).join(" ") : escape(c)
    })
    // Narrow to candidates matching the partial word under the cursor.
    if (opts.partialWord) {
      compls = compls.filter(function (c) {
        return c.indexOf(opts.partialWord) === 0
      })
    }
    console.error([er && er.stack, compls, opts.partialWord])
    if (er || compls.length === 0) return cb(er)

    console.log(compls.join("\n"))
    cb()
  }
}
+
+// the current word has a dash.  Return the config names,
+// with the same number of dashes as the current word has.
// Complete a config name, preserving however many dashes the user has
// typed.  Every config completes plainly; boolean flags additionally
// get a "no-" negated form.  (Removed the unused locals `conf` and
// `confs` — `confs` merely aliased `allConfs`, which is used directly.)
function configCompl (opts, cb) {
  var word = opts.word
    , split = word.match(/^(-+)((?:no-)*)(.*)$/)
    , dashes = split[1]
    , no = split[2]
    , flags = configNames.filter(isFlag)
  console.error(flags)

  return cb(null, allConfs.map(function (c) {
    return dashes + c
  }).concat(flags.map(function (f) {
    return dashes + (no || "no-") + f
  })))
}
+
+// expand with the valid values of various config values.
+// not yet implemented.
// Would complete the legal values of a config option, but that is not
// implemented yet, so it always yields an empty list.
function configValueCompl (options, cb) {
  console.error('configValue', options)
  return cb(null, [])
}
+
+// check if the thing is a flag or not.
// A word is a flag (i.e. takes no value) if it is a "no-" negation, if
// its config type is Boolean, or if it is a shorthand — shorthands
// never take args.  (Removed the unused local `dashes`.)
function isFlag (word) {
  var split = word.match(/^(-*)((?:no-)+)?(.*)$/)
    , no = split[2]
    , conf = split[3]
  return no || configTypes[conf] === Boolean || shorthands[conf]
}
+
// complete against the npm commands
// Offers every command name and alias npm knows about (npm.fullList).
function cmdCompl (opts, cb) {
  return cb(null, npm.fullList)
}


Mime
View raw message