Skip to content

Commit e0d945b

Browse files
authored
Merge pull request #297 from C2FO/v3.6.0-rc
V3.6.0 rc
2 parents b0d60ac + 20d7a99 commit e0d945b

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

41 files changed

+813
-170
lines changed

.npmignore

Lines changed: 0 additions & 8 deletions
This file was deleted.

History.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,11 @@
1+
# v3.6.0
2+
3+
* [ADDED] `maxRows` option to limit the number of rows parsed. [#275](https://github.com/C2FO/fast-csv/issues/275) [#277](https://github.com/C2FO/fast-csv/pull/277) - [@cbrittingham](https://github.com/cbrittingham)
4+
* [ADDED] `skipRows` to allow skipping parsed rows, see [parsing.md](./docs/parsing.md)
5+
* [ADDED] `skipLines` to allow skipping entire lines of a csv [parsing.md](./docs/parsing.md) [#267](https://github.com/C2FO/fast-csv/issues/267)
6+
* Exported formatting and parsing types.
7+
* Removed `.npmignore` in favor of `package.json` files
8+
19
# v3.5.0
210

311
* Upgraded dependencies

benchmark/.eslintrc.js

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
module.exports = {
2+
parserOptions: {
3+
project: null,
4+
},
25
rules: {
36
"no-console": 0,
4-
"@typescript-eslint/no-var-requires": 0
57
},
68
};

benchmark/index.js

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ const path = require('path');
22
const fs = require('fs');
33
const fastCsv = require('..');
44

5-
65
function camelize(str) {
76
return str.replace(/_(.)/g, (a, b) => b.toUpperCase());
87
}
@@ -11,7 +10,7 @@ const promisfyStream = (stream, expectedRows) => {
1110
let count = 0;
1211
return new Promise((res, rej) => {
1312
stream
14-
.on('data', (row) => {
13+
.on('data', row => {
1514
count += 1;
1615
})
1716
.on('end', () => {
@@ -25,13 +24,14 @@ const promisfyStream = (stream, expectedRows) => {
2524
});
2625
};
2726

28-
const benchmarkFastCsv = type => (num) => {
27+
const benchmarkFastCsv = type => num => {
2928
const file = path.resolve(__dirname, `./assets/${num}.${type}.csv`);
30-
const stream = fs.createReadStream(file)
31-
.pipe(fastCsv.parse({ headers: true }))
32-
.transform((data) => {
29+
const stream = fs
30+
.createReadStream(file)
31+
.pipe(fastCsv.parse({ headers: true, maxRows: 10 }))
32+
.transform(data => {
3333
const ret = {};
34-
[ 'first_name', 'last_name', 'email_address' ].forEach((prop) => {
34+
['first_name', 'last_name', 'email_address'].forEach(prop => {
3535
ret[camelize(prop)] = data[prop];
3636
});
3737
ret.address = data.address;
@@ -47,15 +47,15 @@ async function benchmarkRun(title, num, m) {
4747
for (let i = 0; i < howMany; i += 1) {
4848
// eslint-disable-next-line no-await-in-loop
4949
await m(num);
50-
console.log('%s: RUN(%d lines) 1 %dms', title, num, (new Date() - runStart));
50+
console.log('%s: RUN(%d lines) 1 %dms', title, num, new Date() - runStart);
5151
runStart = new Date();
5252
}
5353
console.log('%s: 3xAVG for %d lines %dms', title, num, (new Date() - start) / howMany);
5454
}
5555

5656
function runBenchmarks(num, type) {
5757
console.log(`\nRUNNING ${num}.${type}.csv benchmarks`, num);
58-
return benchmarkRun('fast-csv', num, benchmarkFastCsv(type))
58+
return benchmarkRun('fast-csv', num, benchmarkFastCsv(type));
5959
}
6060

6161
function benchmarks(type) {
@@ -67,7 +67,7 @@ function benchmarks(type) {
6767
benchmarks('nonquoted')
6868
.then(() => benchmarks('quoted'))
6969
.then(() => process.exit())
70-
.catch((e) => {
70+
.catch(e => {
7171
console.error(e.stack);
7272
return process.exit(1);
7373
});

docs/parsing.md

Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@
1717
* [Ignoring Empty Rows](#csv-parse-ignoring-empty-rows)
1818
* [Transforming Rows](#csv-parse-transforming)
1919
* [Validating Rows](#csv-parse-validation)
20+
* [Max Rows](#max-rows)
21+
* [Skip Rows](#skip-rows)
22+
* [Skip Lines](#skip-lines)
2023

2124
<a name="parsing-options"></a>
2225
## Options
@@ -45,6 +48,9 @@
4548
* `rtrim: {boolean} = false`: Set to `true` to right trim all fields.
4649
* `ltrim: {boolean} = false`: Set to `true` to left trim all fields.
4750
* `encoding: {string} = 'utf8'`: Passed to [StringDecoder](https://nodejs.org/api/string_decoder.html#string_decoder_new_stringdecoder_encoding) when decoding incoming buffers. Change if incoming content is not 'utf8' encoded.
51+
* `maxRows: {number} = 0`: If number is `> 0`, the specified number of rows will be parsed (e.g. `100` would return the first 100 rows of data).
52+
* `skipRows: {number} = 0`: If number is `> 0` the specified number of **parsed** rows will be skipped.
53+
* `skipLines: {number} = 0`: If number is `> 0` the specified number of lines will be skipped.
4854

4955
<a name="parsing-events"></a>
5056
## Events
@@ -585,3 +591,123 @@ Valid [row={"firstName":"timmy","lastName":"yukon"}]
585591
Parsed 2 rows
586592
```
587593

594+
<a name="max-rows"></a>
595+
[`examples/parsing/max_rows.example.js`](../examples/parsing/max_rows.example.js)
596+
597+
In the following example there are 10 rows, but only 5 will be parsed because of the `maxRows` option.
598+
599+
```javascript
600+
const rows = [
601+
'header1,header2\n',
602+
'col1,col1\n',
603+
'col2,col2\n',
604+
'col3,col3\n',
605+
'col4,col4\n',
606+
'col5,col5\n',
607+
'col6,col6\n',
608+
'col7,col7\n',
609+
'col8,col8\n',
610+
'col9,col9\n',
611+
'col10,col10',
612+
];
613+
614+
const stream = csv
615+
.parse({ headers: true, maxRows: 5 })
616+
.on('error', error => console.error(error))
617+
.on('data', row => console.log(row))
618+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
619+
620+
rows.forEach(row => stream.write(row));
621+
stream.end();
622+
```
623+
624+
Expected output
625+
626+
```
627+
{ header1: 'col1', header2: 'col1' }
628+
{ header1: 'col2', header2: 'col2' }
629+
{ header1: 'col3', header2: 'col3' }
630+
{ header1: 'col4', header2: 'col4' }
631+
{ header1: 'col5', header2: 'col5' }
632+
Parsed 5 rows
633+
```
634+
635+
<a name="skip-rows"></a>
636+
[`examples/parsing/skip_rows.example.js`](../examples/parsing/skip_rows.example.js)
637+
638+
In the following example the first 2 rows are skipped.
639+
640+
**NOTE** Notice how the header row is not skipped — only the parsed rows are.
641+
642+
```javascript
643+
const rows = [
644+
'header1,header2\n',
645+
'col1,col1\n',
646+
'col2,col2\n',
647+
'col3,col3\n',
648+
'col4,col4\n',
649+
'col5,col5\n',
650+
'col6,col6\n',
651+
];
652+
653+
const stream = csv
654+
.parse({ headers: true, skipRows: 2 })
655+
.on('error', error => console.error(error))
656+
.on('data', row => console.log(row))
657+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
658+
659+
rows.forEach(row => stream.write(row));
660+
stream.end();
661+
```
662+
663+
Expected output
664+
665+
```
666+
{ header1: 'col3', header2: 'col3' }
667+
{ header1: 'col4', header2: 'col4' }
668+
{ header1: 'col5', header2: 'col5' }
669+
{ header1: 'col6', header2: 'col6' }
670+
Parsed 4 rows
671+
```
672+
673+
<a name="skip-lines"></a>
674+
[`examples/parsing/skip_lines.example.js`](../examples/parsing/skip_lines.example.js)
675+
676+
In the following example the first 2 lines are skipped.
677+
678+
**NOTE** Notice how the headers come from the third line because the first two are skipped.
679+
680+
```javascript
681+
const csv = require('../../');
682+
683+
const rows = [
684+
'skip1_header1,skip1_header2\n',
685+
'skip2_header1,skip2_header2\n',
686+
'header1,header2\n',
687+
'col1,col1\n',
688+
'col2,col2\n',
689+
'col3,col3\n',
690+
'col4,col4\n',
691+
];
692+
693+
const stream = csv
694+
.parse({ headers: true, skipLines: 2 })
695+
.on('error', error => console.error(error))
696+
.on('data', row => console.log(row))
697+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
698+
699+
rows.forEach(row => stream.write(row));
700+
stream.end();
701+
```
702+
703+
Expected output
704+
705+
```
706+
{ header1: 'col1', header2: 'col1' }
707+
{ header1: 'col2', header2: 'col2' }
708+
{ header1: 'col3', header2: 'col3' }
709+
{ header1: 'col4', header2: 'col4' }
710+
Parsed 4 rows
711+
```
712+
713+
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
const csv = require('../../');
2+
3+
const rows = [
4+
'header1,header2\n',
5+
'col1,col1\n',
6+
'col2,col2\n',
7+
'col3,col3\n',
8+
'col4,col4\n',
9+
'col5,col5\n',
10+
'col6,col6\n',
11+
'col7,col7\n',
12+
'col8,col8\n',
13+
'col9,col9\n',
14+
'col10,col10',
15+
];
16+
17+
const stream = csv
18+
.parse({ headers: true, maxRows: 5 })
19+
.on('error', error => console.error(error))
20+
.on('data', row => console.log(row))
21+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
22+
23+
rows.forEach(row => stream.write(row));
24+
stream.end();
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
const csv = require('../../');
2+
3+
const rows = [
4+
'skip1_header1,skip1_header2\n',
5+
'skip2_header1,skip2_header2\n',
6+
'header1,header2\n',
7+
'col1,col1\n',
8+
'col2,col2\n',
9+
'col3,col3\n',
10+
'col4,col4\n',
11+
];
12+
13+
const stream = csv
14+
.parse({ headers: true, skipLines: 2 })
15+
.on('error', error => console.error(error))
16+
.on('data', row => console.log(row))
17+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
18+
19+
rows.forEach(row => stream.write(row));
20+
stream.end();
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
const csv = require('../../');
2+
3+
const rows = [
4+
'header1,header2\n',
5+
'col1,col1\n',
6+
'col2,col2\n',
7+
'col3,col3\n',
8+
'col4,col4\n',
9+
'col5,col5\n',
10+
'col6,col6\n',
11+
];
12+
13+
const stream = csv
14+
.parse({ headers: true, skipRows: 2 })
15+
.on('error', error => console.error(error))
16+
.on('data', row => console.log(row))
17+
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
18+
19+
rows.forEach(row => stream.write(row));
20+
stream.end();

package.json

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
{
22
"name": "fast-csv",
3-
"version": "3.5.0",
3+
"version": "3.6.0",
44
"description": "CSV parser and writer",
55
"main": "./build/src/index.js",
66
"types": "./build/src/index.d.ts",
77
"scripts": {
8-
"prepublish": "npm run build",
8+
"prepare": "npm run build",
99
"build": "tsc",
1010
"mocha": "nyc mocha",
1111
"test": "npm run lint && npm run mocha",
@@ -14,6 +14,7 @@
1414
"benchmark": "node ./benchmark",
1515
"coverage": "nyc report --reporter=text-lcov | coveralls"
1616
},
17+
"files": ["build/src/**"],
1718
"repository": {
1819
"type": "git",
1920
"url": "git@github.com:C2FO/fast-csv.git"

src/formatter/formatter/index.ts

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,2 @@
1-
import RowFormatter from './RowFormatter';
2-
3-
export default {
4-
RowFormatter,
5-
};
1+
export { default as RowFormatter } from './RowFormatter';
2+
export { default as FieldFormatter } from './FieldFormatter';

0 commit comments

Comments
 (0)