This library allows you to read large amounts of data in chunks.
npm install --save async-chunk-reader
init(parameters : InitParameters)
parameters : InitParameters
- chunk_size : String | Number
- input_file : String | Stream
- encoding : String
get()
output : Async Iterator
const reader = require('async-chunk-reader')
import * as reader from "async-chunk-reader"
// Example 1: read a gzipped CSV file in chunks of 100000 records.
async function main() {
  // init() configures the reader; get() resolves to an async iterator
  // that yields one chunk at a time.
  const chunks = await reader
    .init({
      chunk_size: 100000,
      input_file: 'input/mobile_network_201805.csv.gz'
    })
    .get();

  // Consume the chunks lazily; `chunk` is never reassigned, so use const.
  for await (const chunk of chunks) {
    console.log(chunk.length);
  }
}

// Attach a rejection handler so errors surface instead of becoming
// an unhandled promise rejection (which terminates modern Node.js).
main().catch(console.error);
// Example 2: read chunks from standard input (input_file accepts a Stream).
async function main() {
  // Passing process.stdin shows the Stream form of `input_file`.
  const chunks = await reader
    .init({
      input_file: process.stdin
    })
    .get();

  // `chunk` is never reassigned inside the loop, so use const.
  for await (const chunk of chunks) {
    console.log(chunk.length);
  }
}

// Attach a rejection handler so errors surface instead of becoming
// an unhandled promise rejection (which terminates modern Node.js).
main().catch(console.error);
// Example 3: read chunks directly from an in-memory string.
async function main() {
  // Passing a plain string shows the String form of `input_file`.
  const chunks = await reader
    .init({
      input_file: "Some string"
    })
    .get();

  // `chunk` is never reassigned inside the loop, so use const.
  for await (const chunk of chunks) {
    console.log(chunk.length);
  }
}

// Attach a rejection handler so errors surface instead of becoming
// an unhandled promise rejection (which terminates modern Node.js).
main().catch(console.error);