diff --git a/index.js b/index.js
index 9c8ac1e..5546d46 100755
--- a/index.js
+++ b/index.js
@@ -15,7 +15,7 @@ var Parser = require('jsonparse')
 */
 
 exports.parse = function (path, map) {
-
+  var header, footer
   var parser = new Parser()
   var stream = through(function (chunk) {
     if('string' === typeof chunk)
@@ -25,6 +25,10 @@ exports.parse = function (path, map) {
   function (data) {
     if(data)
       stream.write(data)
+    if (header)
+      stream.emit('header', header)
+    if (footer)
+      stream.emit('footer', footer)
     stream.queue(null)
   })
 
@@ -62,7 +66,10 @@ exports.parse = function (path, map) {
       if (key && !key.recurse) {
         c = (j === this.stack.length) ? this : this.stack[j]
        if (!c) return
-        if (! check(key, c.key)) return
+        if (! check(key, c.key)) {
+          setHeaderFooter(c.key, value)
+          return
+        }
         emitKey = !!key.emitKey;
         i++
       } else {
@@ -77,6 +84,8 @@ exports.parse = function (path, map) {
             if (!Object.isFrozen(this.stack[j]))
               this.stack[j].value = null
             break
+          } else {
+            setHeaderFooter(c.key, value)
           }
           j++
         }
@@ -97,6 +106,12 @@ exports.parse = function (path, map) {
     for(var k in this.stack)
       if (!Object.isFrozen(this.stack[k]))
         this.stack[k].value = null
+
+    // emit header
+    if (header) {
+      stream.emit('header', header);
+      header = false;
+    }
   }
 
   parser._onToken = parser.onToken;
@@ -118,8 +133,22 @@ exports.parse = function (path, map) {
     stream.emit('error', err)
   }
 
-  return stream
+  return stream
+
+  function setHeaderFooter(key, value) {
+    // header has not been emitted yet
+    if (header !== false) {
+      header = header || {}
+      header[key] = value
+    }
+
+    // footer has not been emitted yet but header has
+    if (footer !== false && header === false) {
+      footer = footer || {}
+      footer[key] = value
+    }
+  }
 
 }
 
 function check (x, y) {
diff --git a/test/error_contents.js b/test/error_contents.js
new file mode 100644
index 0000000..13c27ae
--- /dev/null
+++ b/test/error_contents.js
@@ -0,0 +1,45 @@
+
+
+var fs = require ('fs')
+  , join = require('path').join
+  , file = join(__dirname, 'fixtures','error.json')
+  , JSONStream = require('../')
+  , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+  , parser = JSONStream.parse(['rows'])
+  , called = 0
+  , headerCalled = 0
+  , footerCalled = 0
+  , ended = false
+  , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('header', function (data) {
+  headerCalled ++
+  it(data).deepEqual({
+    error: 'error_code',
+    message: 'this is an error message'
+  })
+})
+
+parser.on('footer', function (data) {
+  footerCalled ++
+})
+
+parser.on('data', function (data) {
+  called ++
+  parsed.push(data)
+})
+
+parser.on('end', function () {
+  ended = true
+})
+
+process.on('exit', function () {
+  it(called).equal(0)
+  it(headerCalled).equal(1)
+  it(footerCalled).equal(0)
+  console.error('PASSED')
+})
diff --git a/test/fixtures/error.json b/test/fixtures/error.json
new file mode 100644
index 0000000..9736f3e
--- /dev/null
+++ b/test/fixtures/error.json
@@ -0,0 +1 @@
+{"error": "error_code", "message": "this is an error message"}
diff --git a/test/fixtures/header_footer.json b/test/fixtures/header_footer.json
new file mode 100644
index 0000000..6e4694d
--- /dev/null
+++ b/test/fixtures/header_footer.json
@@ -0,0 +1,19 @@
+{"total_rows":129,"offset":0,"rows":[
+  { "id":"change1_0.6995461115147918"
+  , "key":"change1_0.6995461115147918"
+  , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
+  , "doc":{
+      "_id": "change1_0.6995461115147918"
+    , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
+  },
+  { "id":"change2_0.6995461115147918"
+  , "key":"change2_0.6995461115147918"
+  , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
+  , "doc":{
+      "_id":"change2_0.6995461115147918"
+    , "_rev":"1-13677d36b98c0c075145bb8975105153"
+    , "hello":2
+    }
+  }
+],
+"foo": {"bar": "baz"}}
diff --git a/test/header_footer.js b/test/header_footer.js
new file mode 100644
index 0000000..ef01879
--- /dev/null
+++ b/test/header_footer.js
@@ -0,0 +1,54 @@
+
+
+var fs = require ('fs')
+  , join = require('path').join
+  , file = join(__dirname, 'fixtures','header_footer.json')
+  , JSONStream = require('../')
+  , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+  , parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
+  , called = 0
+  , headerCalled = 0
+  , footerCalled = 0
+  , ended = false
+  , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('header', function (data) {
+  headerCalled ++
+  it(data).deepEqual({
+    total_rows: 129,
+    offset: 0
+  })
+})
+
+parser.on('footer', function (data) {
+  footerCalled ++
+  it(data).deepEqual({
+    foo: { bar: 'baz' }
+  })
+})
+
+parser.on('data', function (data) {
+  called ++
+  it.has({
+    id: it.typeof('string'),
+    value: {rev: it.typeof('string')},
+    key:it.typeof('string')
+  })
+  parsed.push(data)
+})
+
+parser.on('end', function () {
+  ended = true
+})
+
+process.on('exit', function () {
+  it(called).equal(expected.rows.length)
+  it(headerCalled).equal(1)
+  it(footerCalled).equal(1)
+  it(parsed).deepEqual(expected.rows)
+  console.error('PASSED')
+})