The following data stream does not trigger the 'end' event. The 'data' event is triggered and I can see every data row logged to the console.
var AWS = require('aws-sdk');
var ogr2ogr = require('ogr2ogr');
var JSONStream = require('JSONStream');

var S3 = new AWS.S3();
var source = S3.getObject({Bucket: ..., Key: ...}).createReadStream();

var stream = ogr2ogr(source).format('GeoJSON').stream()
  .pipe(JSONStream.parse('features.*'));

stream.on('data', function (data) {
  console.log(data); // Correctly outputs 70 rows of data.
});

stream.on('end', function () {
  console.log('end'); // This code is never executed.
});

stream.on('error', function (err) {
  console.log(err); // No errors...
});
The process works if I create a write -> read stream after the ogr2ogr transform.
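Roughly, that workaround looks like the sketch below. It replaces the pipeline above; the temporary file path is arbitrary and only for illustration.

var fs = require('fs');

// Workaround sketch: buffer the GeoJSON output to a temporary file first,
// then parse it with a fresh read stream.
var tmpPath = '/tmp/out.geojson'; // arbitrary path for this sketch

ogr2ogr(source).format('GeoJSON').stream()
  .pipe(fs.createWriteStream(tmpPath))
  .on('finish', function () {
    fs.createReadStream(tmpPath)
      .pipe(JSONStream.parse('features.*'))
      .on('data', function (data) {
        console.log(data);
      })
      .on('end', function () {
        console.log('end'); // Fires reliably with the intermediate file.
      });
  });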
Take a look at the docs: https://nodejs.org/api/stream.html#stream_event_end
Note that the 'end' event will not fire unless the data is completely consumed. This can be done by switching into a flowing mode, or by calling stream.read() repeatedly until you get to the end.
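For reference, a minimal sketch of the two consumption patterns the docs describe, using the stream variable from the question (they are alternatives; use one or the other):

// 1) Flowing mode: attaching a 'data' handler (or calling stream.resume())
//    keeps the stream flowing until it is exhausted, after which 'end' fires.
stream.on('data', function (chunk) {
  // consume chunk
});

// 2) Paused mode: call read() repeatedly until it returns null.
stream.on('readable', function () {
  var chunk;
  while ((chunk = stream.read()) !== null) {
    // consume chunk
  }
});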