From: garren@apache.org To: commits@couchdb.apache.org Date: Wed, 14 Oct 2015 10:09:44 -0000 Subject: [47/52] [partial] couchdb-nmo git commit: prepare for release http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/fixtures/depth.json ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/fixtures/depth.json b/node_modules/bulkbadger/node_modules/JSONStream/test/fixtures/depth.json new file mode 100644 index 0000000..868062f --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/fixtures/depth.json @@ -0,0 +1,15 @@ +{ + "total": 5, + "docs": [ + { + "key": { + "value": 0, + "some": "property" + } + }, + {"value": 1}, + {"value": 2}, + {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]}, + {"value": 4} + ] +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/fn.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/fn.js b/node_modules/bulkbadger/node_modules/JSONStream/test/fn.js new file mode 100644 index 0000000..4acc672 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/fn.js @@ -0,0 +1,39 @@ + + +var fs = require ('fs') + , join = require('path').join + , file = join(__dirname, 'fixtures','all_npm.json') + , JSONStream = require('../') + , it = require('it-is') + +function fn (s) { + return !isNaN(parseInt(s, 10)) +} + +var expected = JSON.parse(fs.readFileSync(file)) + , parser = JSONStream.parse(['rows', fn]) + , called = 0 + , ended = false + , parsed = [] + +fs.createReadStream(file).pipe(parser) + +parser.on('data', function (data) { + called ++ + it.has({ + id: it.typeof('string'), + value: {rev: it.typeof('string')}, + key:it.typeof('string') + }) + parsed.push(data) +}) + +parser.on('end', function () { + ended = true +}) + +process.on('exit', function () { + it(called).equal(expected.rows.length) + it(parsed).deepEqual(expected.rows) + console.error('PASSED') +})
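The fn.js test above exercises JSONStream's function matchers: a component of the parse path may be a function, and it matches whichever keys it returns true for — here `fn` accepts any numeric array index under `rows`. A minimal standalone sketch of the same idea, assuming only the vendored `JSONStream` module (the inline document and the `isIndex` helper are invented for illustration):

```js
var JSONStream = require('JSONStream')

// A function in the parse path matches any key for which it returns true;
// this one accepts numeric array indices, i.e. every element of "rows".
function isIndex (key) { return !isNaN(parseInt(key, 10)) }

var parser = JSONStream.parse(['rows', isIndex])
parser.on('data', function (row) { console.log('row:', row) })

// The parser accepts strings as well as Buffers.
parser.write(JSON.stringify({total: 2, rows: [{id: 'a'}, {id: 'b'}]}))
parser.end()
```

Each matching element is emitted individually, which is what lets fn.js compare `parsed` against `expected.rows` one row at a time.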
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/gen.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/gen.js b/node_modules/bulkbadger/node_modules/JSONStream/test/gen.js new file mode 100644 index 0000000..c233722 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/gen.js @@ -0,0 +1,135 @@ +return // don't run this test for now since tape is weird and broken on 0.10 + +var fs = require('fs') +var JSONStream = require('../') +var file = process.argv[2] || '/tmp/JSONStream-test-large.json' +var size = Number(process.argv[3] || 100000) +var tape = require('tape') +// if (process.title !== 'browser') { + tape('out of mem', function (t) { + t.plan(1) + ////////////////////////////////////////////////////// + // Produces a random number between arg1 and arg2 + ////////////////////////////////////////////////////// + var randomNumber = function (min, max) { + var number = Math.floor(Math.random() * (max - min + 1) + min); + return number; + }; + + ////////////////////////////////////////////////////// + // Produces a random string of a length between arg1 and arg2 + ////////////////////////////////////////////////////// + var randomString = function (min, max) { + + // add several spaces to increase chances of creating 'words' + var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; + var result = ''; + + var randomLength = randomNumber(min, max); + + for (var i = randomLength; i > 0; --i) { + result += chars[Math.round(Math.random() * (chars.length - 1))]; + } + return result; + }; + + ////////////////////////////////////////////////////// + // Produces a random JSON document, as a string + ////////////////////////////////////////////////////// + var randomJsonDoc = function () { + + var doc = { + "CrashOccurenceID": randomNumber(10000, 50000), + "CrashID": randomNumber(1000, 10000), + "SiteName": randomString(10, 25), + "MachineName": randomString(10, 25), + "Date": randomString(26, 26), + "ProcessDuration": randomString(18, 18), + "ThreadIdentityName": null, + "WindowsIdentityName": randomString(15, 40), + "OperatingSystemName": randomString(35, 65), + "DetailedExceptionInformation": randomString(100, 800) + }; + + doc = JSON.stringify(doc); + doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute + return doc; + }; + + ////////////////////////////////////////////////////// + // generates test data + ////////////////////////////////////////////////////// + var generateTestData = function (cb) { + + console.log('generating large data file...'); + + var stream = fs.createWriteStream(file, { + encoding: 'utf8' + }); + + var i = 0; + var max = size; + var writing = false + var split = ',\n'; + var doc = randomJsonDoc(); + stream.write('['); + + function write () { + if(writing) return + writing = true + while(++i < max) { + if(Math.random() < 0.001) + console.log('generate..', i + ' / ' + size) + if(!stream.write(doc + split)) { + writing = false + return stream.once('drain', write) + } + } + stream.write(doc + ']') + stream.end(); + console.log('END') + } + write() + stream.on('close', cb) + }; + + ////////////////////////////////////////////////////// + // Shows that parsing 100000 instances using JSONStream fails + // + // After several seconds, you will get this crash + // FATAL ERROR: JS Allocation failed - process out of memory + 
////////////////////////////////////////////////////// + var testJSONStreamParse_causesOutOfMem = function (done) { + var items = 0 + console.log('parsing data files using JSONStream...'); + + var parser = JSONStream.parse([true]); + var stream = fs.createReadStream(file); + stream.pipe(parser); + + parser.on('data', function (data) { + items++ + if(Math.random() < 0.01) console.log(items, '...') + }); + + parser.on('end', function () { + t.equal(items, size) + }); + + }; + + ////////////////////////////////////////////////////// + // main + ////////////////////////////////////////////////////// + + fs.stat(file, function (err, stat) { + console.log(stat) + if(err) + generateTestData(testJSONStreamParse_causesOutOfMem); + else + testJSONStreamParse_causesOutOfMem() + }) + + }) + +// } http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/issues.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/issues.js b/node_modules/bulkbadger/node_modules/JSONStream/test/issues.js new file mode 100644 index 0000000..eba392e --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/issues.js @@ -0,0 +1,20 @@ +var JSONStream = require('../'); +var test = require('tape') + +test('#66', function (t) { + var error = 0; + var stream = JSONStream + .parse() + .on('error', function (err) { + t.ok(err); + error++; + }) + .on('end', function () { + t.ok(error === 1); + t.end(); + }); + + stream.write('["foo":bar['); + stream.end(); + +}); http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/map.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/map.js b/node_modules/bulkbadger/node_modules/JSONStream/test/map.js new file mode 100644 index 0000000..29b9d89 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/map.js @@ -0,0 +1,40 @@ + +var test = require('tape') + +var JSONStream = require('../') + +test('map function', function (t) { + + var actual = [] + + stream = JSONStream.parse([true], function (e) { return e*10 }) + stream.on('data', function (v) { actual.push(v)}) + stream.on('end', function () { + t.deepEqual(actual, [10,20,30,40,50,60]) + t.end() + + }) + + stream.write(JSON.stringify([1,2,3,4,5,6], null, 2)) + stream.end() + +}) + +test('filter function', function (t) { + + var actual = [] + + stream = JSONStream + .parse([true], function (e) { return e%2 ? 
e : null}) + .on('data', function (v) { actual.push(v)}) + .on('end', function () { + t.deepEqual(actual, [1,3,5]) + t.end() + + }) + + stream.write(JSON.stringify([1,2,3,4,5,6], null, 2)) + stream.end() + +}) + http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects.js b/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects.js new file mode 100644 index 0000000..22f6324 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects.js @@ -0,0 +1,36 @@ +var fs = require ('fs'); +var net = require('net'); +var join = require('path').join; +var file = join(__dirname, 'fixtures','all_npm.json'); +var it = require('it-is'); +var JSONStream = require('../'); + +var str = fs.readFileSync(file); + +var datas = {} + +var server = net.createServer(function(client) { + var data_calls = 0; + var parser = JSONStream.parse(['rows', true, 'key']); + parser.on('data', function(data) { + ++ data_calls; + datas[data] = (datas[data] || 0) + 1 + it(data).typeof('string') + }); + + parser.on('end', function() { + console.log('END') + var min = Infinity + for (var d in datas) + min = min > datas[d] ? datas[d] : min + it(min).equal(3); + server.close(); + }); + client.pipe(parser); +}); +server.listen(9999); + +var client = net.connect({ port : 9999 }, function() { + var msgs = str + ' ' + str + '\n\n' + str + client.end(msgs); +}); http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects_error.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects_error.js b/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects_error.js new file mode 100644 index 0000000..83d113b --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/multiple_objects_error.js @@ -0,0 +1,29 @@ +var fs = require ('fs'); +var net = require('net'); +var join = require('path').join; +var file = join(__dirname, 'fixtures','all_npm.json'); +var it = require('it-is'); +var JSONStream = require('../'); + +var str = fs.readFileSync(file); + +var server = net.createServer(function(client) { + var data_calls = 0; + var parser = JSONStream.parse(); + parser.on('error', function(err) { + console.log(err); + server.close(); + }); + + parser.on('end', function() { + console.log('END'); + server.close(); + }); + client.pipe(parser); +}); +server.listen(9999); + +var client = net.connect({ port : 9999 }, function() { + var msgs = str + '}'; + client.end(msgs); +}); http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/null.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/null.js b/node_modules/bulkbadger/node_modules/JSONStream/test/null.js new file mode 100644 index 0000000..95dd60c --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/null.js @@ -0,0 +1,28 @@ +var JSONStream = require('../') + +var data = [ + {ID: 1, optional: null}, + {ID: 2, optional: null}, + {ID: 3, optional: 20}, + {ID: 4, optional: null}, + {ID: 5, optional: 'hello'}, + {ID: 6, optional: null} +] + + +var test = require('tape') + 
+test ('null properties', function (t) { + var actual = [] + var stream = + + JSONStream.parse('*.optional') + .on('data', function (v) { actual.push(v) }) + .on('end', function () { + t.deepEqual(actual, [20, 'hello']) + t.end() + }) + + stream.write(JSON.stringify(data, null, 2)) + stream.end() +}) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/parsejson.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/parsejson.js b/node_modules/bulkbadger/node_modules/JSONStream/test/parsejson.js new file mode 100644 index 0000000..0279887 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/parsejson.js @@ -0,0 +1,28 @@ + + +/* + sometimes jsonparse changes numbers slightly. +*/ + +var r = Math.random() + , Parser = require('jsonparse') + , p = new Parser() + , assert = require('assert') + , times = 20 +while (times --) { + + assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON') + + p.onValue = function (v) { + console.error('parsed', v) + assert.equal( + String(v).slice(0,12), + String(r).slice(0,12) + ) + } + console.error('correct', r) + p.write (new Buffer(JSON.stringify([r]))) + + + +} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/stringify.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/stringify.js b/node_modules/bulkbadger/node_modules/JSONStream/test/stringify.js new file mode 100644 index 0000000..b6de85e --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/stringify.js @@ -0,0 +1,41 @@ + +var fs = require ('fs') + , join = require('path').join + , file = join(__dirname, 'fixtures','all_npm.json') + , JSONStream = require('../') + , it = require('it-is').style('colour') + + function randomObj () { + return ( + Math.random () < 0.4 + ? {hello: 'eonuhckmqjk', + whatever: 236515, + lies: true, + nothing: [null], + stuff: [Math.random(),Math.random(),Math.random()] + } + : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]] + ) + } + +var expected = [] + , stringify = JSONStream.stringify() + , es = require('event-stream') + , stringified = '' + , called = 0 + , count = 10 + , ended = false + +while (count --) + expected.push(randomObj()) + + es.connect( + es.readArray(expected), + stringify, + //JSONStream.parse([/./]), + es.writeArray(function (err, lines) { + + it(JSON.parse(lines.join(''))).deepEqual(expected) + console.error('PASSED') + }) + ) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/stringify_object.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/stringify_object.js b/node_modules/bulkbadger/node_modules/JSONStream/test/stringify_object.js new file mode 100644 index 0000000..9490115 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/stringify_object.js @@ -0,0 +1,47 @@ + +var fs = require ('fs') + , join = require('path').join + , file = join(__dirname, 'fixtures','all_npm.json') + , JSONStream = require('../') + , it = require('it-is').style('colour') + , es = require('event-stream') + , pending = 10 + , passed = true + + function randomObj () { + return ( + Math.random () < 0.4 + ? 
{hello: 'eonuhckmqjk', + whatever: 236515, + lies: true, + nothing: [null], + stuff: [Math.random(),Math.random(),Math.random()] + } + : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]] + ) + } + +for (var ix = 0; ix < pending; ix++) (function (count) { + var expected = {} + , stringify = JSONStream.stringifyObject() + + es.connect( + stringify, + es.writeArray(function (err, lines) { + it(JSON.parse(lines.join(''))).deepEqual(expected) + if (--pending === 0) { + console.error('PASSED') + } + }) + ) + + while (count --) { + var key = Math.random().toString(16).slice(2) + expected[key] = randomObj() + stringify.write([ key, expected[key] ]) + } + + process.nextTick(function () { + stringify.end() + }) +})(ix) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/test.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/test.js b/node_modules/bulkbadger/node_modules/JSONStream/test/test.js new file mode 100644 index 0000000..8ea7c2e --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/test.js @@ -0,0 +1,35 @@ + + +var fs = require ('fs') + , join = require('path').join + , file = join(__dirname, 'fixtures','all_npm.json') + , JSONStream = require('../') + , it = require('it-is') + +var expected = JSON.parse(fs.readFileSync(file)) + , parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/]) + , called = 0 + , ended = false + , parsed = [] + +fs.createReadStream(file).pipe(parser) + +parser.on('data', function (data) { + called ++ + it.has({ + id: it.typeof('string'), + value: {rev: it.typeof('string')}, + key:it.typeof('string') + }) + parsed.push(data) +}) + +parser.on('end', function () { + ended = true +}) + +process.on('exit', function () { + it(called).equal(expected.rows.length) + it(parsed).deepEqual(expected.rows) + console.error('PASSED') +}) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/test2.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/test2.js b/node_modules/bulkbadger/node_modules/JSONStream/test/test2.js new file mode 100644 index 0000000..d09df7b --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/test2.js @@ -0,0 +1,29 @@ + + +var fs = require ('fs') + , join = require('path').join + , file = join(__dirname, '..','package.json') + , JSONStream = require('../') + , it = require('it-is') + +var expected = JSON.parse(fs.readFileSync(file)) + , parser = JSONStream.parse([]) + , called = 0 + , ended = false + , parsed = [] + +fs.createReadStream(file).pipe(parser) + +parser.on('data', function (data) { + called ++ + it(data).deepEqual(expected) +}) + +parser.on('end', function () { + ended = true +}) + +process.on('exit', function () { + it(called).equal(1) + console.error('PASSED') +}) \ No newline at end of file http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/JSONStream/test/two-ways.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/JSONStream/test/two-ways.js b/node_modules/bulkbadger/node_modules/JSONStream/test/two-ways.js new file mode 100644 index 0000000..8f3b89c --- /dev/null +++ b/node_modules/bulkbadger/node_modules/JSONStream/test/two-ways.js @@ -0,0 +1,41 @@ + +var fs = require ('fs') + 
, join = require('path').join + , file = join(__dirname, 'fixtures','all_npm.json') + , JSONStream = require('../') + , it = require('it-is').style('colour') + + function randomObj () { + return ( + Math.random () < 0.4 + ? {hello: 'eonuhckmqjk', + whatever: 236515, + lies: true, + nothing: [null], +// stuff: [Math.random(),Math.random(),Math.random()] + } + : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]] + ) + } + +var expected = [] + , stringify = JSONStream.stringify() + , es = require('event-stream') + , stringified = '' + , called = 0 + , count = 10 + , ended = false + +while (count --) + expected.push(randomObj()) + + es.connect( + es.readArray(expected), + stringify, + JSONStream.parse([/./]), + es.writeArray(function (err, lines) { + + it(lines).has(expected) + console.error('PASSED') + }) + ) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/.npmignore ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/.npmignore b/node_modules/bulkbadger/node_modules/stream-transform/.npmignore new file mode 100644 index 0000000..4c9bd0a --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/.npmignore @@ -0,0 +1,4 @@ +/src +/test +/Makefile +.travis.yml \ No newline at end of file http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/LICENSE ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/LICENSE b/node_modules/bulkbadger/node_modules/stream-transform/LICENSE new file mode 100644 index 0000000..f77b58e --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/LICENSE @@ -0,0 +1,16 @@ +Software License Agreement (BSD License) +======================================== + +Copyright (c) 2011, SARL Adaltas. + +All rights reserved. + +Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +- Neither the name of SARL Adaltas nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission of the SARL Adaltas. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/README.md ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/README.md b/node_modules/bulkbadger/node_modules/stream-transform/README.md new file mode 100644 index 0000000..c23c35a --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/README.md @@ -0,0 +1,65 @@ +[![Build Status](https://secure.travis-ci.org/wdavidw/node-csv-parse.png)][travis] + +Part of the [CSV module][csv_home], this project is a simple object +transformation framework. It implements the Node.js +[`stream.Transform` API][streamtransform]. It also provides a simple +callback-based API for convenience. It is both extremely easy to use and +powerful. + +[Documentation for the "stream-transform" package is available here][home]. + +## Features + +* Follows the Node.js [streaming API][streamtransform] +* Simple, optional callback API +* Synchronous and asynchronous user handler functions +* Accepts arrays of strings or arrays of objects as input +* Sequential or user-defined concurrent execution +* Skip and create new records +* Alter or clone input data +* BSD License + +Usage +----- + +Refer to the [project webpage][home] for [an exhaustive list of options][home] +and [some usage examples][examples]. + +The module is built on the Node.js Stream API. For the sake of simplicity, a +callback API is also provided. To give you a quick look, here's an +example of the callback API: + +```javascript +var transform = require('stream-transform'); + +var input = [ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]; +transform(input, function(data){ + data.push(data.shift()); + return data.join(',')+'\n'; +}, function(err, output){ + output.should.eql([ '2,3,4,1\n', 'b,c,d,a\n' ]); +}); +``` + +Development +----------- + +Tests are executed with mocha. To install it, simply run `npm install` +followed by `npm test`. It will install mocha and its dependencies in your +project "node_modules" directory and run the test suite. The tests run +against the CoffeeScript source files. + +To generate the JavaScript files, run `npm run coffee`. + +The test suite is run online with [Travis][travis] against Node.js versions +0.10, 0.11 and 0.12. 
+ + +[streamtransform]: http://nodejs.org/api/stream.html#stream_class_stream_transform +[home]: http://csv.adaltas.com/transform/ +[examples]: http://csv.adaltas.com/transform/examples/ +[csv_home]: https://github.com/wdavidw/node-csv +[stream-samples]: https://github.com/wdavidw/node-stream-transform/tree/master/samples +[stream-test]: https://github.com/wdavidw/node-stream-transform/tree/master/test +[travis]: http://travis-ci.org/wdavidw/node-stream-transform + http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/lib/index.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/lib/index.js b/node_modules/bulkbadger/node_modules/stream-transform/lib/index.js new file mode 100644 index 0000000..cd7246e --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/lib/index.js @@ -0,0 +1,158 @@ +// Generated by CoffeeScript 1.9.2 +var Transformer, stream, util, + slice = [].slice; + +stream = require('stream'); + +util = require('util'); + +module.exports = function() { + var argument, callback, data, error, handler, i, j, k, len, options, result, transform, type, v; + options = {}; + for (i = j = 0, len = arguments.length; j < len; i = ++j) { + argument = arguments[i]; + type = typeof argument; + if (argument === null) { + type = 'null'; + } else if (type === 'object' && Array.isArray(argument)) { + type = 'array'; + } + if (i === 0) { + if (type === 'function') { + handler = argument; + } else if (type !== null) { + data = argument; + } + continue; + } + if (type === 'object') { + for (k in argument) { + v = argument[k]; + options[k] = v; + } + } else if (type === 'function') { + if (handler && i === arguments.length - 1) { + callback = argument; + } else { + handler = argument; + } + } else if (type !== 'null') { + throw new Error('Invalid arguments'); + } + } + transform = new Transformer(options, handler); + error = false; + if (data) { + process.nextTick(function() { + var l, len1, row; + for (l = 0, len1 = data.length; l < len1; l++) { + row = data[l]; + if (error) { + break; + } + transform.write(row); + } + return transform.end(); + }); + } + if (callback) { + result = []; + transform.on('readable', function() { + var r, results; + results = []; + while ((r = transform.read())) { + results.push(result.push(r)); + } + return results; + }); + transform.on('error', function(err) { + error = true; + return callback(err); + }); + transform.on('end', function() { + if (!error) { + return callback(null, result); + } + }); + } + return transform; +}; + +Transformer = function(options1, transform1) { + var base; + this.options = options1 != null ? options1 : {}; + this.transform = transform1; + this.options.objectMode = true; + if ((base = this.options).parallel == null) { + base.parallel = 100; + } + stream.Transform.call(this, this.options); + this.running = 0; + this.started = 0; + this.finished = 0; + return this; +}; + +util.inherits(Transformer, stream.Transform); + +module.exports.Transformer = Transformer; + +Transformer.prototype._transform = function(chunk, encoding, cb) { + var err; + this.started++; + this.running++; + if (this.running < this.options.parallel) { + cb(); + cb = null; + } + try { + if (this.transform.length === 2) { + this.transform.call(null, chunk, (function(_this) { + return function() { + var chunks, err; + err = arguments[0], chunks = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; + return _this._done(err, chunks, cb); + }; + })(this)); + } else { + this._done(null, [this.transform.call(null, chunk)], cb); + } + return false; + } catch (_error) { + err = _error; + return this._done(err); + } +}; + +Transformer.prototype._flush = function(cb) { + this._ending = function() { + if (this.running === 0) { + return cb(); + } + }; + return this._ending(); +}; + +Transformer.prototype._done = function(err, chunks, cb) { + var chunk, j, len; + this.running--; + if (err) { + return this.emit('error', err); + } + this.finished++; + for (j = 0, len = chunks.length; j < len; j++) { + chunk = chunks[j]; + if (typeof chunk === 'number') { + chunk = "" + chunk; + } + if (chunk != null) { + this.push(chunk); + } + } + if (cb) { + cb(); + } + if (this._ending) { + return this._ending(); + } +}; http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/package.json ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/package.json b/node_modules/bulkbadger/node_modules/stream-transform/package.json new file mode 100644 index 0000000..8de70df --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/package.json @@ -0,0 +1,54 @@ +{ + "version": "0.1.0", + "name": "stream-transform", + "description": "Object transformations implementing the Node.js `stream.Transform` API", + "keywords": [ + "stream", + "transform", + "csv", + "object" + ], + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "http://www.github.com/wdavidw/node-stream-transform" + }, + "homepage": "http://csv.adaltas.com/transform/", + "dependencies": {}, + "devDependencies": { + "coffee-script": "latest", + "pad": "latest", + "mocha": "latest", + "csv-generate": "latest", + "should": "latest" + }, + "optionalDependencies": {}, + "main": "./lib", + "scripts": { + "coffee": "coffee -b -o lib src", + "pretest": "coffee -b -o lib src", + "test": "NODE_ENV=test ./node_modules/.bin/mocha --compilers coffee:coffee-script/register --reporter dot" + }, + "gitHead": "1e56e58222733f0767a03cf9c16c79364903a507", + "_id": "stream-transform@0.1.0", + "_shasum": "6bd0d47b79f48b34ff21f6b9393126427e61f485", + "_from": "stream-transform@>=0.1.0 <0.2.0", + "_npmVersion": "1.4.28", + "_npmUser": { + "name": "david", + "email": "david@adaltas.com" + }, + "maintainers": [ + { + "name": "david", + "email": "david@adaltas.com" + } + ], + "dist": { + "shasum": "6bd0d47b79f48b34ff21f6b9393126427e61f485", + "tarball": "http://registry.npmjs.org/stream-transform/-/stream-transform-0.1.0.tgz" + }, + "directories": {}, + "_resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-0.1.0.tgz", + "readme": "ERROR: No README data found!" 
+} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/samples/asynchronous.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/samples/asynchronous.js b/node_modules/bulkbadger/node_modules/stream-transform/samples/asynchronous.js new file mode 100644 index 0000000..a65d194 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/samples/asynchronous.js @@ -0,0 +1,17 @@ + +var transform = require('..'); + +transform([ + ['1','2','3','4'], + ['a','b','c','d'] +], function(data, callback){ + setImmediate(function(){ + data.push(data.shift()); + callback(null, data.join(',')+'\n'); + }); +}, {parallel: 20}) +.pipe(process.stdout); + +// Output: +// 2,3,4,1 +// b,c,d,a http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/samples/callback.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/samples/callback.js b/node_modules/bulkbadger/node_modules/stream-transform/samples/callback.js new file mode 100644 index 0000000..32db1e9 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/samples/callback.js @@ -0,0 +1,13 @@ + +var transform = require('..'); +var should = require('should'); + +transform([ + ['1','2','3','4'], + ['a','b','c','d'] +], function(data){ + data.push(data.shift()) + return data; +}, function(err, output){ + output.should.eql([ [ '2', '3', '4', '1' ], [ 'b', 'c', 'd', 'a' ] ]); +}); http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/samples/stream.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/samples/stream.js b/node_modules/bulkbadger/node_modules/stream-transform/samples/stream.js new file mode 100644 index 0000000..a9282a9 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/samples/stream.js @@ -0,0 +1,23 @@ + +var transform = require('..'); +var should = require('should'); + +var output = []; +var transformer = transform(function(data){ + data.push(data.shift()) + return data; +}); +transformer.on('readable', function(){ + while(row = transformer.read()){ + output.push(row); + } +}); +transformer.on('error', function(err){ + console.log(err.message); +}); +transformer.on('finish', function(){ + output.should.eql([ [ '2', '3', '4', '1' ], [ 'b', 'c', 'd', 'a' ] ]); +}); +transformer.write(['1','2','3','4']); +transformer.write(['a','b','c','d']); +transformer.end(); http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/node_modules/stream-transform/samples/synchronous.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/node_modules/stream-transform/samples/synchronous.js b/node_modules/bulkbadger/node_modules/stream-transform/samples/synchronous.js new file mode 100644 index 0000000..1c3caf7 --- /dev/null +++ b/node_modules/bulkbadger/node_modules/stream-transform/samples/synchronous.js @@ -0,0 +1,15 @@ + +var transform = require('..'); + +transform([ + ['1','2','3','4'], + ['a','b','c','d'] +], function(data){ + data.push(data.shift()); + return data.join(',')+'\n'; +}) +.pipe(process.stdout); + +// Output: +// 2,3,4,1 +// b,c,d,a 
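The samples above cover the synchronous, asynchronous, callback and stream styles. One feature from the README that none of them shows is record skipping: in the generated lib/index.js, `_done` only pushes chunks that are non-null, so a synchronous handler can drop a record by returning `null`. A small sketch under that assumption (the filter predicate and input rows are invented for illustration):

```js
var transform = require('stream-transform');

transform([
  ['1', 'a'],
  ['x', 'b'],
  ['2', 'c']
], function (record) {
  // Returning null (or undefined) drops the record entirely:
  // lib/index.js pushes a chunk only when it is non-null.
  if (isNaN(parseInt(record[0], 10))) return null;
  return record.join(',') + '\n';
})
.pipe(process.stdout);

// Expected output:
// 1,a
// 2,c
```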
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/package.json ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/package.json b/node_modules/bulkbadger/package.json new file mode 100644 index 0000000..2b658cd --- /dev/null +++ b/node_modules/bulkbadger/package.json @@ -0,0 +1,63 @@ +{ + "name": "bulkbadger", + "version": "1.0.0", + "description": "batch ldjson suitable for couchdb's _bulk_docs", + "main": "index.js", + "scripts": { + "test": "tap test" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/robertkowalski/bulkbadger.git" + }, + "keywords": [ + "couchdb" + ], + "author": { + "name": "Robert Kowalski", + "email": "rok@kowalski.gd" + }, + "license": "BSD-2-Clause", + "bugs": { + "url": "https://github.com/robertkowalski/bulkbadger/issues" + }, + "homepage": "https://github.com/robertkowalski/bulkbadger", + "devDependencies": { + "hock": "~1.2.0", + "tap": "~1.3.2", + "couchbulkimporter": "*" + }, + "optionalDependencies": { + "csv-parse": "~1.0.0", + "JSONStream": "~1.0.3", + "stream-transform": "~0.1.0" + }, + "gitHead": "d6e7835d273ce91d35de7c7084a60b3dfeba4416", + "dependencies": { + "csv-parse": "~1.0.0", + "JSONStream": "~1.0.3", + "stream-transform": "~0.1.0" + }, + "_id": "bulkbadger@1.0.0", + "_shasum": "f9cb9c249d4e7f7c7e00aceef17e6b96d2937479", + "_from": "bulkbadger@>=1.0.0 <2.0.0", + "_npmVersion": "2.7.4", + "_nodeVersion": "0.12.2", + "_npmUser": { + "name": "robertkowalski", + "email": "rok@kowalski.gd" + }, + "maintainers": [ + { + "name": "robertkowalski", + "email": "rok@kowalski.gd" + } + ], + "dist": { + "shasum": "f9cb9c249d4e7f7c7e00aceef17e6b96d2937479", + "tarball": "http://registry.npmjs.org/bulkbadger/-/bulkbadger-1.0.0.tgz" + }, + "directories": {}, + "_resolved": "https://registry.npmjs.org/bulkbadger/-/bulkbadger-1.0.0.tgz", + "readme": "ERROR: No README data found!" 
+} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/test/01-unit.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/test/01-unit.js b/node_modules/bulkbadger/test/01-unit.js new file mode 100644 index 0000000..23cbb4d --- /dev/null +++ b/node_modules/bulkbadger/test/01-unit.js @@ -0,0 +1,72 @@ +const test = require('tap').test +const BulkBadger = require('../') + +const PassThrough = require('stream').PassThrough + +test('three elements - default chunksize', function (t) { + + const readable = new PassThrough({objectMode: true}) + const pt = new PassThrough({objectMode: true}) + + pt.on('data', function (data) { + t.deepEqual(data, {docs: [{a: 1}, {a: 2}, {a: 3}]}) + t.end() + }) + + readable + .pipe(new BulkBadger()) + .pipe(pt) + + readable.write({a: 1}) + readable.write({a: 2}) + readable.write({a: 3}) + readable.end() +}) + + +test('three elements - chunksize 1 element', function (t) { + + const res = [] + const readable = new PassThrough({objectMode: true}) + const pt = new PassThrough({objectMode: true}) + + pt + .on('data', function (data) { + + res.push(data) + }) + .on('finish', function () { + t.deepEqual(res, [{docs: [{a: 1}]}, {docs: [{a: 2}]}, {docs: [{a: 3}]}]) + t.end() + }) + + readable + .pipe(new BulkBadger({chunksize: 1})) + .pipe(pt) + + + readable.write({a: 1}) + readable.write({a: 2}) + readable.write({a: 3}) + readable.end() +}) + +test('three elements - chunksize 10 elements', function (t) { + + const readable = new PassThrough({objectMode: true}) + const pt = new PassThrough({objectMode: true}) + + pt.on('data', function (data) { + t.deepEqual(data, {docs: [{a: 1}, {a: 2}, {a: 3}]}) + t.end() + }) + + readable + .pipe(new BulkBadger({chunksize: 10})) + .pipe(pt) + + readable.write({a: 1}) + readable.write({a: 2}) + readable.write({a: 3}) + readable.end() +}) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/bulkbadger/test/02-integration.js ---------------------------------------------------------------------- diff --git a/node_modules/bulkbadger/test/02-integration.js b/node_modules/bulkbadger/test/02-integration.js new file mode 100644 index 0000000..788fbdb --- /dev/null +++ b/node_modules/bulkbadger/test/02-integration.js @@ -0,0 +1,50 @@ +const test = require('tap').test +const BulkBadger = require('../') + +const CouchBulkImporter = require('couchbulkimporter') +const http = require('http') + +const PassThrough = require('stream').PassThrough + + +test('sends data to CouchDB', function (t) { + + t.plan(4) + const results = [ + '{}', + '{"docs":[{"a":1}]}', + '{"docs":[{"a":2}]}', + '{"docs":[{"a":3}]}' + ] + + const server = http.createServer(function (req, res) { + req.on('data', function (data) { + t.equal(results.shift(), data.toString()) + }) + res.statusCode = 201 + res.end('{"ok": "true"}') + }) + + server.listen(1337, function () { + const readable = new PassThrough({objectMode: true}) + + const couchbulk = new CouchBulkImporter({ + url: 'http://localhost:1337/baseball' + }) + + couchbulk.on('finish', function () { + server.close(function () { + t.end() + }) + }) + + readable + .pipe(new BulkBadger({chunksize: 1})) + .pipe(couchbulk) + + readable.write({a: 1}) + readable.write({a: 2}) + readable.write({a: 3}) + readable.end() + }) +}) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/.npmignore ---------------------------------------------------------------------- diff --git 
a/node_modules/couchbulkimporter/.npmignore b/node_modules/couchbulkimporter/.npmignore new file mode 100644 index 0000000..7a0fc07 --- /dev/null +++ b/node_modules/couchbulkimporter/.npmignore @@ -0,0 +1,14 @@ +*.swp +.*.swp + +.DS_Store +*~ +.project +.settings +npm-debug.log +coverage.html +.idea +lib-cov + +node_modules + http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/LICENSE ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/LICENSE b/node_modules/couchbulkimporter/LICENSE new file mode 100644 index 0000000..6c53ca0 --- /dev/null +++ b/node_modules/couchbulkimporter/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) Robert Kowalski ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/README.md ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/README.md b/node_modules/couchbulkimporter/README.md new file mode 100644 index 0000000..58245b1 --- /dev/null +++ b/node_modules/couchbulkimporter/README.md @@ -0,0 +1,156 @@ +# CouchBulkImporter + +Takes a stream of docs, e.g. 
from `bulkbadger` and imports them into +CouchDB using the /_bulk_docs endpoint + + +**Usage examples:** + + - Migrate data from MongoDB to CouchDB + - Migrate data from Postgres to CouchDB + - Import data from CSV files into CouchDB + - Import data from JSON files into CouchDB + + +## Examples + +### Use a regular JSON file from the fs as input + +**testjson.json:** + +```js +[ + {"a": "b"}, + {"b": "c"}, + {"c": "d"} +] + +``` + +```js +var CouchBulkImporter = require('couchbulkimporter') +var BulkBadger = require('bulkbadger') + +var fs = require('fs') +var JSONStream = require('JSONStream') + + +fs + .createReadStream(__dirname + '/testjson.json') + .pipe(JSONStream.parse('*')) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/jsonstreamfromfile' + })) + + +``` + +### Use a CSV file as input + + +```js +var CouchBulkImporter = require('couchbulkimporter') +var BulkBadger = require('bulkbadger') + + +var parse = require('csv-parse') +var fs = require('fs') +var transform = require('stream-transform') +var JSONStream = require('JSONStream') + +var parser = parse({comment: '#', delimiter: ':'}) +var input = fs.createReadStream('/etc/passwd') + + +var transformer = transform(function (record, cb) { + + var username = record[0] + var pw = record[1] + var uid = record[2] + var gid = record[3] + var comment = record[4] + var home = record[5] + var shell = record[6] + + cb(null, { + id: username, + pw: pw, + uid: uid, + gid: gid, + comment: comment, + home: home, + shell: shell + }) +}) + +input + .pipe(parser) + .pipe(transformer) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/etcpasswd' + })) + +``` + +### Stream from MongoDB into CouchDB + +```js +var MongoClient = require('mongodb').MongoClient +var BulkBadger = require('bulkbadger') +var CouchBulkImporter = require('couchbulkimporter') + +var url = 'mongodb://localhost:27017/test' +// Use connect method to connect to the Server +MongoClient.connect(url, function (err, db) { + console.log('Connected correctly to server') + var col = db.collection('restaurants') + var stream = col.find({}, {}) + stream + .pipe(new BulkBadger({chunksize: 500})) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/hellofrommongo' + })).on('error', function (e) { + console.log('Oh noes!') + console.log(e) + }) + + stream.on('error', function (e) { + console.log('Oh noes!') + console.log(e) + }) + stream.on('end', function () { + console.log('migration finished') + db.close() + }) +}) + +``` + +### Use Line-Delimited JSON as input + +**ldjson.json:** + +```js +{"rocko": "artischocko"} +{"zett": "zettmeister"} +{"mr": "mussie"} +``` + +```js +var CouchBulkImporter = require('couchbulkimporter') +var BulkBadger = require('bulkbadger') + +var fs = require('fs') +var JSONStream = require('JSONStream') + + +fs + .createReadStream(__dirname + '/ldjson.json') + .pipe(JSONStream.parse()) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/ldjsonhellooo' + })) +``` http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/csv.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/csv.js b/node_modules/couchbulkimporter/examples/csv.js new file mode 100644 index 0000000..af52814 --- /dev/null +++ b/node_modules/couchbulkimporter/examples/csv.js @@ -0,0 +1,40 @@ 
+var CouchBulkImporter = require('../') +var BulkBadger = require('bulkbadger') + + +var parse = require('csv-parse') +var fs = require('fs') +var transform = require('stream-transform') + +var parser = parse({comment: '#', delimiter: ':'}) +var input = fs.createReadStream('/etc/passwd') + + +var transformer = transform(function (record, cb) { + + var username = record[0] + var pw = record[1] + var uid = record[2] + var gid = record[3] + var comment = record[4] + var home = record[5] + var shell = record[6] + + cb(null, { + id: username, + pw: pw, + uid: uid, + gid: gid, + comment: comment, + home: home, + shell: shell + }) +}) + +input + .pipe(parser) + .pipe(transformer) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/etcpasswd' + })) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/json.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/json.js b/node_modules/couchbulkimporter/examples/json.js new file mode 100644 index 0000000..83e246b --- /dev/null +++ b/node_modules/couchbulkimporter/examples/json.js @@ -0,0 +1,14 @@ +var CouchBulkImporter = require('../') +var BulkBadger = require('bulkbadger') + +var fs = require('fs') +var JSONStream = require('JSONStream') + + +fs + .createReadStream(__dirname + '/testjson.json') + .pipe(JSONStream.parse('*')) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/jsonstreamfromfile' + })) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/ldjson.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/ldjson.js b/node_modules/couchbulkimporter/examples/ldjson.js new file mode 100644 index 0000000..49650de --- /dev/null +++ b/node_modules/couchbulkimporter/examples/ldjson.js @@ -0,0 +1,14 @@ +var CouchBulkImporter = require('../') +var BulkBadger = require('bulkbadger') + +var fs = require('fs') +var JSONStream = require('JSONStream') + + +fs + .createReadStream(__dirname + '/ldjson.json') + .pipe(JSONStream.parse()) + .pipe(new BulkBadger()) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/ldjsonhellooo' + })) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/ldjson.json ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/ldjson.json b/node_modules/couchbulkimporter/examples/ldjson.json new file mode 100644 index 0000000..db96ee7 --- /dev/null +++ b/node_modules/couchbulkimporter/examples/ldjson.json @@ -0,0 +1,3 @@ +{"rocko": "artischocko"} +{"zett": "zettmeister"} +{"mr": "mussie"} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/mongo.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/mongo.js b/node_modules/couchbulkimporter/examples/mongo.js new file mode 100644 index 0000000..63a9a08 --- /dev/null +++ b/node_modules/couchbulkimporter/examples/mongo.js @@ -0,0 +1,28 @@ +var MongoClient = require('mongodb').MongoClient +var BulkBadger = require('bulkbadger') +var CouchBulkImporter = require('../') + +var url = 'mongodb://localhost:27017/test' +// Use connect method to connect to the Server 
+MongoClient.connect(url, function (err, db) { + console.log('Connected correctly to server') + var col = db.collection('restaurants') + var stream = col.find({}, {}) + stream + .pipe(new BulkBadger({chunksize: 500})) + .pipe(new CouchBulkImporter({ + url: 'http://tester:testerpass@localhost:5984/hellofrommongo' + })).on('error', function (e) { + console.log('Oh noes!') + console.log(e) + }) + + stream.on('error', function (e) { + console.log('Oh noes!') + console.log(e) + }) + stream.on('end', function () { + console.log('migration finished') + db.close() + }) +}) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/examples/testjson.json ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/examples/testjson.json b/node_modules/couchbulkimporter/examples/testjson.json new file mode 100644 index 0000000..b9c562a --- /dev/null +++ b/node_modules/couchbulkimporter/examples/testjson.json @@ -0,0 +1,5 @@ +[ + {"a": "b"}, + {"b": "c"}, + {"c": "d"} +] http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/index.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/index.js b/node_modules/couchbulkimporter/index.js new file mode 100644 index 0000000..75ea7ee --- /dev/null +++ b/node_modules/couchbulkimporter/index.js @@ -0,0 +1,72 @@ +const request = require('request') +const Writable = require('stream').Writable +const util = require('util') + +module.exports = CouchBulkImporter +function CouchBulkImporter (options) { + if (!options) options = {} + if (!options.url) { + const msg = [ + 'options.url must be set', + 'example:', + "new CouchBulkImporter({url: 'http://localhost:5984/baseball'})" + ].join('\n') + throw new Error(msg) + } + + Writable.call(this, { + decodeStrings: false, + objectMode: true + }) + + this.url = options.url.replace(/\/$/, '') + this.targetDatabaseCreated = false +} + +util.inherits(CouchBulkImporter, Writable) + +CouchBulkImporter.prototype._write = write +function write (chunk, enc, done) { + + if (this.targetDatabaseCreated) + return importChunk.apply(this) + + createTargetDatabase.apply(this) + function createTargetDatabase () { + request({ + json: true, + uri: this.url, + method: 'PUT', + body: {} + }, function (er, res, body) { + if (er) return done(er) + const code = res.statusCode + if (code !== 200 && code !== 201 && code !== 412) { + const msg = 'CouchDB server answered: \n Status: ' + + res.statusCode + '\n Body: ' + JSON.stringify(body) + return done(new Error(msg)) + } + + this.targetDatabaseCreated = true + importChunk.apply(this) + }.bind(this)) + } + + function importChunk () { + request({ + json: true, + uri: this.url + '/_bulk_docs', + method: 'POST', + body: chunk + }, function (er, res, body) { + if (er) return done(er) + if (!/^2../.test(res.statusCode)) { + const msg = 'CouchDB server answered: \n Status: ' + + res.statusCode + '\n Body: ' + JSON.stringify(body) + return done(new Error(msg)) + } + + done() + }) + } +} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/.bin/JSONStream ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/.bin/JSONStream b/node_modules/couchbulkimporter/node_modules/.bin/JSONStream new file mode 120000 index 0000000..4490737 --- /dev/null +++ 
b/node_modules/couchbulkimporter/node_modules/.bin/JSONStream @@ -0,0 +1 @@ +../JSONStream/index.js \ No newline at end of file http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/.npmignore ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/.npmignore b/node_modules/couchbulkimporter/node_modules/JSONStream/.npmignore new file mode 100644 index 0000000..a9a9d58 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/.npmignore @@ -0,0 +1,2 @@ +node_modules/* +node_modules http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/.travis.yml ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/.travis.yml b/node_modules/couchbulkimporter/node_modules/JSONStream/.travis.yml new file mode 100644 index 0000000..6e5919d --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.APACHE2 ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.APACHE2 b/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.APACHE2 new file mode 100644 index 0000000..6366c04 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.MIT ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.MIT b/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.MIT new file mode 100644 index 0000000..6eafbd7 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/examples/all_docs.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/examples/all_docs.js b/node_modules/couchbulkimporter/node_modules/JSONStream/examples/all_docs.js new file mode 100644 index 0000000..fa87fe5 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/examples/all_docs.js @@ -0,0 +1,13 @@ +var request = require('request') + , JSONStream = require('JSONStream') + , es = require('event-stream') + +var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array) + , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'}) + , logger = es.mapSync(function (data) { //create a stream that logs to stderr, + console.error(data) + return data + }) + +req.pipe(parser) +parser.pipe(logger) http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/index.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/index.js b/node_modules/couchbulkimporter/node_modules/JSONStream/index.js new file mode 100755 index 0000000..ec30105 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/index.js @@ -0,0 +1,201 @@ +#! /usr/bin/env node + +var Parser = require('jsonparse') + , through = require('through') + +/* + + the value of this.stack that creationix's jsonparse has is weird. + + it makes this code ugly, but his problem is way harder than mine, + so i'll forgive him. + +*/ + +exports.parse = function (path, map) { + + var parser = new Parser() + var stream = through(function (chunk) { + if('string' === typeof chunk) + chunk = new Buffer(chunk) + parser.write(chunk) + }, + function (data) { + if(data) + stream.write(data) + stream.queue(null) + }) + + if('string' === typeof path) + path = path.split('.').map(function (e) { + if (e === '*') + return true + else if (e === '') // '..'.split('.') returns an empty string + return {recurse: true} + else + return e + }) + + + var count = 0, _key + if(!path || !path.length) + path = null + + parser.onValue = function (value) { + if (!this.root) + stream.root = value + + if(! path) return + + var i = 0 // iterates on path + var j = 0 // iterates on stack + while (i < path.length) { + var key = path[i] + var c + j++ + + if (key && !key.recurse) { + c = (j === this.stack.length) ? this : this.stack[j] + if (!c) return + if (! check(key, c.key)) return + i++ + } else { + i++ + var nextKey = path[i] + if (! nextKey) return + while (true) { + c = (j === this.stack.length) ? 
this : this.stack[j] + if (!c) return + if (check(nextKey, c.key)) { + i++; + this.stack[j].value = null + break + } + j++ + } + } + + } + if (j !== this.stack.length) return + + count ++ + var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key]) + var data = this.value[this.key] + if(null != data) + if(null != (data = map ? map(data, actualPath) : data)) + stream.queue(data) + delete this.value[this.key] + for(var k in this.stack) + this.stack[k].value = null + } + parser._onToken = parser.onToken; + + parser.onToken = function (token, value) { + parser._onToken(token, value); + if (this.stack.length === 0) { + if (stream.root) { + if(!path) + stream.queue(stream.root) + count = 0; + stream.root = null; + } + } + } + + parser.onError = function (err) { + if(err.message.indexOf("at position") > -1) + err.message = "Invalid JSON (" + err.message + ")"; + stream.emit('error', err) + } + + + return stream +} + +function check (x, y) { + if ('string' === typeof x) + return y == x + else if (x && 'function' === typeof x.exec) + return x.exec(y) + else if ('boolean' === typeof x) + return x + else if ('function' === typeof x) + return x(y) + return false +} + +exports.stringify = function (op, sep, cl, indent) { + indent = indent || 0 + if (op === false){ + op = '' + sep = '\n' + cl = '' + } else if (op == null) { + + op = '[\n' + sep = '\n,\n' + cl = '\n]\n' + + } + + //else, whatever you like + + var stream + , first = true + , anyData = false + stream = through(function (data) { + anyData = true + var json = JSON.stringify(data, null, indent) + if(first) { first = false ; stream.queue(op + json)} + else stream.queue(sep + json) + }, + function (data) { + if(!anyData) + stream.queue(op) + stream.queue(cl) + stream.queue(null) + }) + + return stream +} + +exports.stringifyObject = function (op, sep, cl, indent) { + indent = indent || 0 + if (op === false){ + op = '' + sep = '\n' + cl = '' + } else if (op == null) { + + op = '{\n' + sep = '\n,\n' + cl = '\n}\n' + + } + + //else, whatever you like + + var first = true + , anyData = false + var stream = through(function (data) { + anyData = true + var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent) + if(first) { first = false ; this.queue(op + json)} + else this.queue(sep + json) + }, + function (data) { + if(!anyData) this.queue(op) + this.queue(cl) + + this.queue(null) + }) + + return stream +} + +if(!module.parent && process.title !== 'browser') { + process.stdin + .pipe(exports.parse(process.argv[2])) + .pipe(exports.stringify('[', ',\n', ']\n', 2)) + .pipe(process.stdout) +} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/.npmignore ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/.npmignore b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/.npmignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/.npmignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/LICENSE ---------------------------------------------------------------------- diff --git 
a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/LICENSE b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/LICENSE new file mode 100644 index 0000000..6dc24be --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/LICENSE @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2012 Tim Caswell + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/README.markdown ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/README.markdown b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/README.markdown new file mode 100644 index 0000000..0f405d3 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/README.markdown @@ -0,0 +1,11 @@ +This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394 + +The MIT License (MIT) +Copyright (c) 2011-2012 Tim Caswell + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
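The README's one-line summary above is essentially the whole API: jsonparse exposes a single Parser whose write() accepts Buffers or strings and whose onValue callback fires once for every completed JSON value, with this.stack giving the nesting context (the twitterfeed.js example below drives it the same way). A minimal usage sketch, assuming only what the vendored source shows; the require path is a placeholder for wherever jsonparse.js is resolved from:

var Parser = require('jsonparse')  // placeholder path; vendored here as ./jsonparse.js

var p = new Parser()

// onValue fires for every completed value, innermost values first;
// this.stack.length === 0 means the root value has just finished
p.onValue = function (value) {
  if (this.stack.length === 0) console.log('root:', value)
}

// input may be split across arbitrary chunk boundaries
p.write('{"total"')
p.write(': 5}')   // prints: root: { total: 5 }
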
+ http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/bench.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/bench.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/bench.js new file mode 100644 index 0000000..b36d92f --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/bench.js @@ -0,0 +1,26 @@ +var fs = require('fs'), + Parser = require('./jsonparse'); + + +var json = fs.readFileSync("samplejson/basic.json"); + + +while (true) { + var start = Date.now(); + for (var i = 0; i < 1000; i++) { + JSON.parse(json); + } + var first = Date.now() - start; + + start = Date.now(); + var p = new Parser(); + for (var i = 0; i < 1000; i++) { + p.write(json); + } + var second = Date.now() - start; + + + console.log("JSON.parse took %s", first); + console.log("streaming parser took %s", second); + console.log("streaming is %s times slower", second / first); +} http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js new file mode 100644 index 0000000..10210d4 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js @@ -0,0 +1,30 @@ +var Parser = require('../jsonparse'); +var Http = require('http'); +require('./colors'); +var p = new Parser(); +var cred = require('./credentials'); +var client = Http.createClient(80, "stream.twitter.com"); +var request = client.request("GET", "/1/statuses/sample.json", { + "Host": "stream.twitter.com", + "Authorization": (new Buffer(cred.username + ":" + cred.password)).toString("base64") +}); +request.on('response', function (response) { + console.log(response.statusCode); + console.dir(response.headers); + response.on('data', function (chunk) { + p.write(chunk); + }); + response.on('end', function () { + console.log("END"); + }); +}); +request.end(); +var text = "", name = ""; +p.onValue = function (value) { + if (this.stack.length === 1 && this.key === 'text') { text = value; } + if (this.stack.length === 2 && this.key === 'name' && this.stack[1].key === 'user') { name = value; } + if (this.stack.length === 0) { + console.log(text.blue + " - " + name.yellow); + text = name = ""; + } +}; http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js new file mode 100644 index 0000000..70d9bd0 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js @@ -0,0 +1,341 @@ +/*global Buffer*/ +// Named constants with unique integer values +var C = {}; +// Tokens +var LEFT_BRACE = C.LEFT_BRACE = 0x1; +var RIGHT_BRACE = C.RIGHT_BRACE = 
0x2; +var LEFT_BRACKET = C.LEFT_BRACKET = 0x3; +var RIGHT_BRACKET = C.RIGHT_BRACKET = 0x4; +var COLON = C.COLON = 0x5; +var COMMA = C.COMMA = 0x6; +var TRUE = C.TRUE = 0x7; +var FALSE = C.FALSE = 0x8; +var NULL = C.NULL = 0x9; +var STRING = C.STRING = 0xa; +var NUMBER = C.NUMBER = 0xb; +// Tokenizer States +var START = C.START = 0x11; +var STOP = C.STOP = 0x12; +var TRUE1 = C.TRUE1 = 0x21; +var TRUE2 = C.TRUE2 = 0x22; +var TRUE3 = C.TRUE3 = 0x23; +var FALSE1 = C.FALSE1 = 0x31; +var FALSE2 = C.FALSE2 = 0x32; +var FALSE3 = C.FALSE3 = 0x33; +var FALSE4 = C.FALSE4 = 0x34; +var NULL1 = C.NULL1 = 0x41; +var NULL2 = C.NULL2 = 0x42; +var NULL3 = C.NULL3 = 0x43; +var NUMBER1 = C.NUMBER1 = 0x51; +var NUMBER3 = C.NUMBER3 = 0x53; +var STRING1 = C.STRING1 = 0x61; +var STRING2 = C.STRING2 = 0x62; +var STRING3 = C.STRING3 = 0x63; +var STRING4 = C.STRING4 = 0x64; +var STRING5 = C.STRING5 = 0x65; +var STRING6 = C.STRING6 = 0x66; +// Parser States +var VALUE = C.VALUE = 0x71; +var KEY = C.KEY = 0x72; +// Parser Modes +var OBJECT = C.OBJECT = 0x81; +var ARRAY = C.ARRAY = 0x82; + + +function Parser() { + this.tState = START; + this.value = undefined; + + this.string = undefined; // string data + this.unicode = undefined; // unicode escapes + + this.key = undefined; + this.mode = undefined; + this.stack = []; + this.state = VALUE; + this.bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary + this.bytes_in_sequence = 0; // bytes in multi byte utf8 char to read + this.temp_buffs = { "2": new Buffer(2), "3": new Buffer(3), "4": new Buffer(4) }; // for rebuilding chars split before boundary is reached + + // Stream offset + this.offset = -1; +} + +// Slow code to string converter (only used when throwing syntax errors) +Parser.toknam = function (code) { + var keys = Object.keys(C); + for (var i = 0, l = keys.length; i < l; i++) { + var key = keys[i]; + if (C[key] === code) { return key; } + } + return code && ("0x" + code.toString(16)); +}; + +var proto = Parser.prototype; +proto.onError = function (err) { throw err; }; +proto.charError = function (buffer, i) { + this.tState = STOP; + this.onError(new Error("Unexpected " + JSON.stringify(String.fromCharCode(buffer[i])) + " at position " + i + " in state " + Parser.toknam(this.tState))); +}; +proto.write = function (buffer) { + if (typeof buffer === "string") buffer = new Buffer(buffer); + var n; + for (var i = 0, l = buffer.length; i < l; i++) { + if (this.tState === START){ + n = buffer[i]; + this.offset++; + if(n === 0x7b){ this.onToken(LEFT_BRACE, "{"); // { + }else if(n === 0x7d){ this.onToken(RIGHT_BRACE, "}"); // } + }else if(n === 0x5b){ this.onToken(LEFT_BRACKET, "["); // [ + }else if(n === 0x5d){ this.onToken(RIGHT_BRACKET, "]"); // ] + }else if(n === 0x3a){ this.onToken(COLON, ":"); // : + }else if(n === 0x2c){ this.onToken(COMMA, ","); // , + }else if(n === 0x74){ this.tState = TRUE1; // t + }else if(n === 0x66){ this.tState = FALSE1; // f + }else if(n === 0x6e){ this.tState = NULL1; // n + }else if(n === 0x22){ this.string = ""; this.tState = STRING1; // " + }else if(n === 0x2d){ this.string = "-"; this.tState = NUMBER1; // - + }else{ + if (n >= 0x30 && n < 0x40) { // 1-9 + this.string = String.fromCharCode(n); this.tState = NUMBER3; + } else if (n === 0x20 || n === 0x09 || n === 0x0a || n === 0x0d) { + // whitespace + } else { + return this.charError(buffer, i); + } + } + }else if (this.tState === STRING1){ // After open quote + n = buffer[i]; // get current byte from buffer + // check for carry over 
of a multi byte char split between data chunks + // & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration + if (this.bytes_remaining > 0) { + for (var j = 0; j < this.bytes_remaining; j++) { + this.temp_buffs[this.bytes_in_sequence][this.bytes_in_sequence - this.bytes_remaining + j] = buffer[j]; + } + this.string += this.temp_buffs[this.bytes_in_sequence].toString(); + this.bytes_in_sequence = this.bytes_remaining = 0; + i = i + j - 1; + } else if (this.bytes_remaining === 0 && n >= 128) { // else if no remainder bytes carried over, parse multi byte (>=128) chars one at a time + if (n <= 193 || n > 244) { + return this.onError(new Error("Invalid UTF-8 character at position " + i + " in state " + Parser.toknam(this.tState))); + } + if ((n >= 194) && (n <= 223)) this.bytes_in_sequence = 2; + if ((n >= 224) && (n <= 239)) this.bytes_in_sequence = 3; + if ((n >= 240) && (n <= 244)) this.bytes_in_sequence = 4; + if ((this.bytes_in_sequence + i) > buffer.length) { // if bytes needed to complete char fall outside buffer length, we have a boundary split + for (var k = 0; k <= (buffer.length - 1 - i); k++) { + this.temp_buffs[this.bytes_in_sequence][k] = buffer[i + k]; // fill temp buffer of correct size with bytes available in this chunk + } + this.bytes_remaining = (i + this.bytes_in_sequence) - buffer.length; + i = buffer.length - 1; + } else { + this.string += buffer.slice(i, (i + this.bytes_in_sequence)).toString(); + i = i + this.bytes_in_sequence - 1; + } + } else if (n === 0x22) { this.tState = START; this.onToken(STRING, this.string); this.offset += Buffer.byteLength(this.string, 'utf8') + 1; this.string = undefined; } + else if (n === 0x5c) { this.tState = STRING2; } + else if (n >= 0x20) { this.string += String.fromCharCode(n); } + else { + return this.charError(buffer, i); + } + }else if (this.tState === STRING2){ // After backslash + n = buffer[i]; + if(n === 0x22){ this.string += "\""; this.tState = STRING1; + }else if(n === 0x5c){ this.string += "\\"; this.tState = STRING1; + }else if(n === 0x2f){ this.string += "\/"; this.tState = STRING1; + }else if(n === 0x62){ this.string += "\b"; this.tState = STRING1; + }else if(n === 0x66){ this.string += "\f"; this.tState = STRING1; + }else if(n === 0x6e){ this.string += "\n"; this.tState = STRING1; + }else if(n === 0x72){ this.string += "\r"; this.tState = STRING1; + }else if(n === 0x74){ this.string += "\t"; this.tState = STRING1; + }else if(n === 0x75){ this.unicode = ""; this.tState = STRING3; + }else{ + return this.charError(buffer, i); + } + }else if (this.tState === STRING3 || this.tState === STRING4 || this.tState === STRING5 || this.tState === STRING6){ // unicode hex codes + n = buffer[i]; + // 0-9 A-F a-f + if ((n >= 0x30 && n < 0x40) || (n > 0x40 && n <= 0x46) || (n > 0x60 && n <= 0x66)) { + this.unicode += String.fromCharCode(n); + if (this.tState++ === STRING6) { + this.string += String.fromCharCode(parseInt(this.unicode, 16)); + this.unicode = undefined; + this.tState = STRING1; + } + } else { + return this.charError(buffer, i); + } + } else if (this.tState === NUMBER1 || this.tState === NUMBER3) { + n = buffer[i]; + + switch (n) { + case 0x30: // 0 + case 0x31: // 1 + case 0x32: // 2 + case 0x33: // 3 + case 0x34: // 4 + case 0x35: // 5 + case 0x36: // 6 + case 0x37: // 7 + case 0x38: // 8 + case 0x39: // 9 + case 0x2e: // . 
+ case 0x65: // e + case 0x45: // E + case 0x2b: // + + case 0x2d: // - + this.string += String.fromCharCode(n); + this.tState = NUMBER3; + break; + default: + this.tState = START; + var result = Number(this.string); + + if (isNaN(result)){ + return this.charError(buffer, i); + } + + if ((this.string.match(/[0-9]+/) == this.string) && (result.toString() != this.string)) { + // Long string of digits which is an ID string and not valid and/or safe JavaScript integer Number + this.onToken(STRING, this.string); + } else { + this.onToken(NUMBER, result); + } + + this.offset += this.string.length - 1; + this.string = undefined; + i--; + break; + } + }else if (this.tState === TRUE1){ // r + if (buffer[i] === 0x72) { this.tState = TRUE2; } + else { return this.charError(buffer, i); } + }else if (this.tState === TRUE2){ // u + if (buffer[i] === 0x75) { this.tState = TRUE3; } + else { return this.charError(buffer, i); } + }else if (this.tState === TRUE3){ // e + if (buffer[i] === 0x65) { this.tState = START; this.onToken(TRUE, true); this.offset+= 3; } + else { return this.charError(buffer, i); } + }else if (this.tState === FALSE1){ // a + if (buffer[i] === 0x61) { this.tState = FALSE2; } + else { return this.charError(buffer, i); } + }else if (this.tState === FALSE2){ // l + if (buffer[i] === 0x6c) { this.tState = FALSE3; } + else { return this.charError(buffer, i); } + }else if (this.tState === FALSE3){ // s + if (buffer[i] === 0x73) { this.tState = FALSE4; } + else { return this.charError(buffer, i); } + }else if (this.tState === FALSE4){ // e + if (buffer[i] === 0x65) { this.tState = START; this.onToken(FALSE, false); this.offset+= 4; } + else { return this.charError(buffer, i); } + }else if (this.tState === NULL1){ // u + if (buffer[i] === 0x75) { this.tState = NULL2; } + else { return this.charError(buffer, i); } + }else if (this.tState === NULL2){ // l + if (buffer[i] === 0x6c) { this.tState = NULL3; } + else { return this.charError(buffer, i); } + }else if (this.tState === NULL3){ // l + if (buffer[i] === 0x6c) { this.tState = START; this.onToken(NULL, null); this.offset += 3; } + else { return this.charError(buffer, i); } + } + } +}; +proto.onToken = function (token, value) { + // Override this to get events +}; + +proto.parseError = function (token, value) { + this.tState = STOP; + this.onError(new Error("Unexpected " + Parser.toknam(token) + (value ? 
("(" + JSON.stringify(value) + ")") : "") + " in state " + Parser.toknam(this.state))); +}; +proto.push = function () { + this.stack.push({value: this.value, key: this.key, mode: this.mode}); +}; +proto.pop = function () { + var value = this.value; + var parent = this.stack.pop(); + this.value = parent.value; + this.key = parent.key; + this.mode = parent.mode; + this.emit(value); + if (!this.mode) { this.state = VALUE; } +}; +proto.emit = function (value) { + if (this.mode) { this.state = COMMA; } + this.onValue(value); +}; +proto.onValue = function (value) { + // Override me +}; +proto.onToken = function (token, value) { + if(this.state === VALUE){ + if(token === STRING || token === NUMBER || token === TRUE || token === FALSE || token === NULL){ + if (this.value) { + this.value[this.key] = value; + } + this.emit(value); + }else if(token === LEFT_BRACE){ + this.push(); + if (this.value) { + this.value = this.value[this.key] = {}; + } else { + this.value = {}; + } + this.key = undefined; + this.state = KEY; + this.mode = OBJECT; + }else if(token === LEFT_BRACKET){ + this.push(); + if (this.value) { + this.value = this.value[this.key] = []; + } else { + this.value = []; + } + this.key = 0; + this.mode = ARRAY; + this.state = VALUE; + }else if(token === RIGHT_BRACE){ + if (this.mode === OBJECT) { + this.pop(); + } else { + return this.parseError(token, value); + } + }else if(token === RIGHT_BRACKET){ + if (this.mode === ARRAY) { + this.pop(); + } else { + return this.parseError(token, value); + } + }else{ + return this.parseError(token, value); + } + }else if(this.state === KEY){ + if (token === STRING) { + this.key = value; + this.state = COLON; + } else if (token === RIGHT_BRACE) { + this.pop(); + } else { + return this.parseError(token, value); + } + }else if(this.state === COLON){ + if (token === COLON) { this.state = VALUE; } + else { return this.parseError(token, value); } + }else if(this.state === COMMA){ + if (token === COMMA) { + if (this.mode === ARRAY) { this.key++; this.state = VALUE; } + else if (this.mode === OBJECT) { this.state = KEY; } + + } else if (token === RIGHT_BRACKET && this.mode === ARRAY || token === RIGHT_BRACE && this.mode === OBJECT) { + this.pop(); + } else { + return this.parseError(token, value); + } + }else{ + return this.parseError(token, value); + } +}; + +Parser.C = C; + +module.exports = Parser; http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/package.json ---------------------------------------------------------------------- diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/package.json b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/package.json new file mode 100644 index 0000000..996d5d9 --- /dev/null +++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/package.json @@ -0,0 +1,59 @@ +{ + "name": "jsonparse", + "description": "This is a pure-js JSON streaming parser for node.js", + "tags": [ + "json", + "stream" + ], + "version": "1.2.0", + "author": { + "name": "Tim Caswell", + "email": "tim@creationix.com" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/creationix/jsonparse.git" + }, + "devDependencies": { + "tape": "~0.1.1", + "tap": "~0.3.3" + }, + "scripts": { + "test": "tap test/*.js" + }, + "bugs": { + "url": "http://github.com/creationix/jsonparse/issues" + }, + "engines": [ + "node >= 0.2.0" + ], + "license": "MIT", + 
"main": "jsonparse.js", + "gitHead": "b3f4dc7b49300a549aea19a628d712009ca84ced", + "homepage": "https://github.com/creationix/jsonparse", + "_id": "jsonparse@1.2.0", + "_shasum": "5c0c5685107160e72fe7489bddea0b44c2bc67bd", + "_from": "jsonparse@>=1.1.0 <2.0.0", + "_npmVersion": "1.4.28", + "_npmUser": { + "name": "creationix", + "email": "tim@creationix.com" + }, + "maintainers": [ + { + "name": "creationix", + "email": "tim@creationix.com" + }, + { + "name": "substack", + "email": "mail@substack.net" + } + ], + "dist": { + "shasum": "5c0c5685107160e72fe7489bddea0b44c2bc67bd", + "tarball": "http://registry.npmjs.org/jsonparse/-/jsonparse-1.2.0.tgz" + }, + "directories": {}, + "_resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.2.0.tgz", + "readme": "ERROR: No README data found!" +}