couchdb-commits mailing list archives

From: gar...@apache.org
Subject: [41/51] [abbrv] [partial] couchdb-nmo git commit: Remove node_modules from repo
Date: Mon, 30 Nov 2015 09:37:23 GMT
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown b/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
deleted file mode 100644
index 4a6531a..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
+++ /dev/null
@@ -1,172 +0,0 @@
-# JSONStream
-
-streaming JSON.parse and stringify
-
-![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
-
-## example
-
-``` js
-
-var request = require('request')
-  , JSONStream = require('JSONStream')
-  , es = require('event-stream')
-
-request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
-  .pipe(JSONStream.parse('rows.*'))
-  .pipe(es.mapSync(function (data) {
-    console.error(data)
-    return data
-  }))
-```
-
-## JSONStream.parse(path)
-
-parse stream of values that match a path
-
-``` js
-  JSONStream.parse('rows.*.doc')
-```
-
-The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/),
-which will match a child at any depth (see examples below).
-
-If your keys include characters such as `.` or `*`, use an array instead, e.g.
-`['row', true, /^doc/]`.
-
-An array path may contain strings, `RegExp`s, booleans, and/or functions. The `..` operator is also
-available in array representation, using `{recurse: true}`.
-Any object that matches the path will be emitted as 'data' (and `pipe`d downstream).
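-
-For example, a quick sketch (not one of the module's shipped examples) using an array path
-with a `RegExp`:
-
-``` js
-var JSONStream = require('JSONStream')
-
-// match rows.<any index>.<any key beginning with "doc">
-var parser = JSONStream.parse(['rows', true, /^doc/])
-
-parser.on('data', function (value) {
-  console.log('matched:', value)
-})
-
-parser.write('{"rows":[{"id":"a","doc":{"hello":"world"}}]}')
-parser.end()
-```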
-
-If `path` is empty or null, no 'data' events are emitted.
-
-### Examples
-
-query a couchdb view:
-
-``` bash
-curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
-```
-you will get something like this:
-
-``` js
-{"total_rows":129,"offset":0,"rows":[
-  { "id":"change1_0.6995461115147918"
-  , "key":"change1_0.6995461115147918"
-  , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
-  , "doc":{
-      "_id":  "change1_0.6995461115147918"
-    , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
-  },
-  { "id":"change2_0.6995461115147918"
-  , "key":"change2_0.6995461115147918"
-  , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
-  , "doc":{
-      "_id":"change2_0.6995461115147918"
-    , "_rev":"1-13677d36b98c0c075145bb8975105153"
-    , "hello":2
-    }
-  },
-]}
-
-```
-
-we are probably most interested in `rows.*.doc`
-
-create a `Stream` that parses the documents from the feed like this:
-
-``` js
-var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
-
-stream.on('data', function(data) {
-  console.log('received:', data);
-});
-```
-awesome!
-
-### recursive patterns (..)
-
-`JSONStream.parse('docs..value')` 
-(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
-will emit every `value` object that is a child, grand-child, etc. of the 
-`docs` object. In this example, it will match exactly 5 times at various depth
-levels, emitting 0, 1, 2, 3 and 4 as results.
-
-```js
-{
-  "total": 5,
-  "docs": [
-    {
-      "key": {
-        "value": 0,
-        "some": "property"
-      }
-    },
-    {"value": 1},
-    {"value": 2},
-    {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
-    {"value": 4}
-  ]
-}
-```
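-
-To see this in action, here is a sketch that feeds the JSON above (as a string) through the
-recursive parser:
-
-``` js
-var JSONStream = require('JSONStream')
-
-var json = '{"total": 5, "docs": [{"key": {"value": 0, "some": "property"}}, {"value": 1}, {"value": 2}, {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]}, {"value": 4}]}'
-
-var parser = JSONStream.parse('docs..value')
-
-parser.on('data', function (value) {
-  console.log(value) // logs 0, 1, 2, 3 and 4
-})
-
-parser.write(json)
-parser.end()
-```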
-
-## JSONStream.parse(pattern, map)
-
-provide a function that can be used to map or filter
-the JSON output. `map` is passed the value at that node of the pattern.
-If `map` returns something non-nullish (anything but `null` or `undefined`),
-that value will be emitted in the stream. If it returns a nullish value,
-nothing will be emitted.
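-
-For instance, a sketch (not one of the module's own examples) that keeps only the docs whose
-`hello` field is even:
-
-``` js
-var JSONStream = require('JSONStream')
-
-// the map function returns null for odd values, so those docs are filtered out
-var parser = JSONStream.parse('rows.*.doc', function (doc) {
-  return doc.hello % 2 === 0 ? doc : null
-})
-
-parser.on('data', function (doc) {
-  console.log('kept:', doc)
-})
-
-parser.write('{"rows":[{"doc":{"hello":1}},{"doc":{"hello":2}}]}')
-parser.end()
-```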
-
-## JSONStream.stringify(open, sep, close)
-
-Create a writable stream.
-
-you may pass in custom `open`, `sep`, and `close` strings.
-But, by default, `JSONStream.stringify()` will create an array
-(with default options `open='[\n', sep='\n,\n', close='\n]\n'`).
-
-If you call `JSONStream.stringify(false)`,
-the elements will only be separated by a newline.
-
-If you only write one item this will be valid JSON.
-
-If you write many items,
-you can use a `RegExp` to split it into valid chunks.
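-
-A minimal sketch of the default array behaviour:
-
-``` js
-var JSONStream = require('JSONStream')
-
-var stringify = JSONStream.stringify() // open='[\n', sep='\n,\n', close='\n]\n'
-stringify.pipe(process.stdout)
-
-// each write becomes one JSON-encoded element of the output array
-stringify.write({hello: 1})
-stringify.write({hello: 2})
-stringify.end()
-```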
-
-## JSONStream.stringifyObject(open, sep, close)
-
-Very much like `JSONStream.stringify`,
-but creates a writable stream for objects instead of arrays.
-
-Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
-
-When you `.write()` to the stream you must supply an array with `[ key, data ]`
-as the first argument.
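-
-For example, a sketch of the same idea for objects:
-
-``` js
-var JSONStream = require('JSONStream')
-
-var stringify = JSONStream.stringifyObject() // open='{\n', sep='\n,\n', close='\n}\n'
-stringify.pipe(process.stdout)
-
-// each write is a [ key, data ] pair that becomes one property of the output object
-stringify.write(['greeting', {hello: 'world'}])
-stringify.write(['count', 2])
-stringify.end()
-```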
-
-## unix tool
-
-query npm to see all the modules that browserify has ever depended on.
-
-``` bash
-curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
-```
-
-## numbers
-
-There are occasional problems parsing and stringifying very precise numbers.
-
-I have opened an issue here:
-
-https://github.com/creationix/jsonparse/issues/2
-
-+1
-
-## Acknowledgements
-
-this module depends on https://github.com/creationix/jsonparse
-by Tim Caswell.
-Thanks also to Florent Jaby for teaching me about parsing with
-https://github.com/Floby/node-json-streams
-
-## license
-
-Dual-licensed under the MIT License or the Apache License, version 2.0

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
deleted file mode 100644
index 6c386d6..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-var fs = require ('fs')
-  , join = require('path').join
-  , file = join(__dirname, 'fixtures','all_npm.json')
-  , JSONStream = require('../')
-  , it = require('it-is').style('colour')
-
-  function randomObj () {
-    return (
-      Math.random () < 0.4
-      ? {hello: 'eonuhckmqjk',
-          whatever: 236515,
-          lies: true,
-          nothing: [null],
-//          stuff: [Math.random(),Math.random(),Math.random()]
-        } 
-      : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
-    )
-  }
-
-var expected =  []
-  , stringify = JSONStream.stringify()
-  , es = require('event-stream')
-  , stringified = ''
-  , called = 0
-  , count = 10
-  , ended = false
-  
-while (count --)
-  expected.push(randomObj())
-
-es.connect(
-  es.readArray(expected),
-  stringify,
-  JSONStream.parse([true]),
-  es.writeArray(function (err, lines) {
-
-    it(lines).has(expected)
-    console.error('PASSED')
-  })
-)

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
deleted file mode 100644
index 3c28d49..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
+++ /dev/null
@@ -1,18 +0,0 @@
-var test = require('tape')
-var JSONStream = require('../')
-var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
-
-test('basic parsing', function (t) {
-  t.plan(2)
-  var parsed = JSONStream.parse("rows.*")
-  var parsedKeys = {}
-  parsed.on('data', function(match) {
-    parsedKeys[Object.keys(match)[0]] = true
-  })
-  parsed.on('end', function() {
-    t.equal(!!parsedKeys['hello'], true)
-    t.equal(!!parsedKeys['foo'], true)
-  })
-  parsed.write(testData)
-  parsed.end()
-})
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
deleted file mode 100644
index 315fdc8..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var fs = require ('fs');
-var net = require('net');
-var join = require('path').join;
-var file = join(__dirname, 'fixtures','all_npm.json');
-var JSONStream = require('../');
-
-
-var server = net.createServer(function(client) {
-    var parser = JSONStream.parse([]);
-    parser.on('end', function() {
-        console.log('close')
-        console.error('PASSED');
-        server.close();
-    });
-    client.pipe(parser);
-    var n = 4
-    client.on('data', function () {
-      if(--n) return
-      client.end();
-    })
-});
-server.listen(9999);
-
-
-var client = net.connect({ port : 9999 }, function() {
-    fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
-});

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
deleted file mode 100644
index 78149b9..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var fs = require ('fs')
-  , join = require('path').join
-  , file = join(__dirname, 'fixtures','all_npm.json')
-  , JSONStream = require('../')
-  , it = require('it-is')
-
-var expected = JSON.parse(fs.readFileSync(file))
-  , parser = JSONStream.parse('rows..rev')
-  , called = 0
-  , ended = false
-  , parsed = []
-
-fs.createReadStream(file).pipe(parser)
-  
-parser.on('data', function (data) {
-  called ++
-  parsed.push(data)
-})
-
-parser.on('end', function () {
-  ended = true
-})
-
-process.on('exit', function () {
-  it(called).equal(expected.rows.length)
-  for (var i = 0 ; i < expected.rows.length ; i++)
-    it(parsed[i]).deepEqual(expected.rows[i].value.rev)
-  console.error('PASSED')
-})

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
deleted file mode 100644
index f99d881..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
+++ /dev/null
@@ -1,29 +0,0 @@
- var fs = require ('fs')
-   , join = require('path').join
-   , file = join(__dirname, 'fixtures','depth.json')
-   , JSONStream = require('../')
-   , it = require('it-is')
-
- var expected = JSON.parse(fs.readFileSync(file))
-   , parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
-   , called = 0
-   , ended = false
-   , parsed = []
-
- fs.createReadStream(file).pipe(parser)
-  
- parser.on('data', function (data) {
-   called ++
-   parsed.push(data)
- })
-
- parser.on('end', function () {
-   ended = true
- })
-
- process.on('exit', function () {
-   it(called).equal(5)
-   for (var i = 0 ; i < 5 ; i++)
-     it(parsed[i]).deepEqual(i)
-   console.error('PASSED')
- })

http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/6436833c/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
deleted file mode 100644
index 19e888c..0000000
--- a/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
+++ /dev/null
@@ -1,44 +0,0 @@
-var JSONStream = require('../')
-  , stream = require('stream')
-  , it = require('it-is')
-
-var output = [ [], [] ]
-
-var parser1 = JSONStream.parse(['docs', /./])
-parser1.on('data', function(data) {
-  output[0].push(data)
-})
-
-var parser2 = JSONStream.parse(['docs', /./])
-parser2.on('data', function(data) {
-  output[1].push(data)
-})
-
-var pending = 2
-function onend () {
-  if (--pending > 0) return
-  it(output).deepEqual([
-    [], [{hello: 'world'}]
-  ])
-  console.error('PASSED')
-}
-parser1.on('end', onend)
-parser2.on('end', onend)
-
-function makeReadableStream() {
-  var readStream = new stream.Stream()
-  readStream.readable = true
-  readStream.write = function (data) { this.emit('data', data) }
-  readStream.end = function (data) { this.emit('end') }
-  return readStream
-}
-
-var emptyArray = makeReadableStream()
-emptyArray.pipe(parser1)
-emptyArray.write('{"docs":[]}')
-emptyArray.end()
-
-var objectArray = makeReadableStream()
-objectArray.pipe(parser2)
-objectArray.write('{"docs":[{"hello":"world"}]}')
-objectArray.end()

