-
Notifications
You must be signed in to change notification settings - Fork 0
/
auntie-count-async-stream-example.js
49 lines (41 loc) · 1.57 KB
/
auntie-count-async-stream-example.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
/*
 * Auntie#count example: it (ASYNC) loads a file containing 8192 long english words,
 * each separated by the CRLF '\r\n' sequence, and counts pattern occurrences
 * chunk-by-chunk as the read stream emits data.
 * To "mess" things up, you could reduce the stream chunk size to 4 bytes
 * (uncomment the highWaterMark lines below) so the '\r\n' pattern gets split
 * across chunk boundaries.
 */
const log = console.log
    , fs = require( 'fs' )
    , Auntie = require( '../' )
    , path = __dirname + '/data/long-english-words-crlf.txt'
    // default pattern is '\r\n'
    , pattern = '\r\n'
    , untie = Auntie( pattern )
    // create an async read stream
    , rstream = fs.createReadStream( path )
    ;

log( '\n- Auntie#count example, load english long words from a file in ASYNC way:\n "%s"\n', path );

// uncomment lines below to reduce the stream chunk size to 4 bytes
// log( '- current highwatermark value for stream: %d bytes', rstream._readableState.highWaterMark );
// I voluntarily reduce the chunk buffer size to 4 bytes
// rstream._readableState.highWaterMark = 4;
// log( '- new highwatermark value for stream: %d bytes', rstream._readableState.highWaterMark );

log( '- sequence to parse is "\\r\\n" ->', untie.seq );
log( '- starting parse data stream..' );
log( '- counting occurrences in the data stream..' );

// running totals for the final report
let chunks = 0
    , tot = 0
    ;

rstream.on( 'data', function ( chunk ) {
    ++chunks;
    tot += chunk.length;
    // feed the chunk to the parser; matches accumulate internally in untie.cnt,
    // so the per-chunk return value is not needed here
    untie.count( chunk );
} );

rstream.on( 'end', function () {
    log( '- !end stream' );
} );

rstream.on( 'close', function () {
    // 'close' fires after 'end', once the fd is released — safe point to report totals
    log( '- !close stream' );
    log( '\n- total data length: %d bytes', tot );
    log( '- total data chunks: %d ', chunks );
    log( '\n- total matches: %d (lines)\n', untie.cnt[ 0 ] );
} );