
Commit e337fd7

Merge pull request #52 from doug-martin/master
v0.4.2
2 parents: 9273b9b + ca43ee5

File tree

8 files changed: +124 -50 lines changed

- .travis.yml
- History.md
- README.md
- docs/History.html
- docs/index.html
- lib/formatter.js
- package.json
- test/fast-csv.test.js


.travis.yml

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 language: node_js
 node_js:
-- 0.1
+- "0.10"
 before_script:
 - npm install -g grunt-cli

History.md

Lines changed: 6 additions & 1 deletion

@@ -1,6 +1,11 @@
+# v0.4.2
+
+* Added ability to specify a rowDelimiter when creating a csv.
+* Added discardUnmappedColumns option to allow the ignoring of extra data [#45](https://github.com/C2FO/fast-csv/pull/45)
+
 # v0.4.1
 
-* Fixed race condition that occured if you called pause during a flush.
+* Fixed race condition that occurred if you called pause during a flush.
 
 # v0.4.0
 
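The two entries above describe the new options in this release. A minimal, hedged sketch of how they might be used, based only on calls that appear elsewhere in this commit's docs and tests (csv.writeToString returns a string synchronously in this version, per the test added below; "in.csv" is a placeholder path):

var csv = require("fast-csv");

// rowDelimiter: join formatted rows with "\r\n" instead of the default "\n".
var out = csv.writeToString(
    [{a: "a1", b: "b1"}, {a: "a2", b: "b2"}],
    {headers: true, rowDelimiter: "\r\n"}
); // "a,b\r\na1,b1\r\na2,b2"

// discardUnmappedColumns: while parsing, drop columns that do not map to a
// header (the "extra data" mentioned in the changelog).
csv
    .fromPath("in.csv", {headers: true, discardUnmappedColumns: true})
    .on("record", function (data) {
        console.log(data);
    })
    .on("end", function () {
        console.log("done");
    });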

README.md

Lines changed: 1 addition & 0 deletions

@@ -216,6 +216,7 @@ csv
 Formatting accepts the same options as parsing with an additional `transform` option.
 
 * `transform(row)`: A function that accepts a row and returns a transformed one to be written.
+* `rowDelimiter='\n'`: Specify an alternate row delimiter (i.e `\r\n`)
 
 **`createWriteStream(options)`**
 
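The rowDelimiter option documented here also applies to the streaming writer. A short sketch using csv.createWriteStream, mirroring the test added later in this commit ("out.csv" is an illustrative path; write(null) is the end-of-data signal used by the existing tests):

var fs = require("fs"),
    csv = require("fast-csv");

var writable = fs.createWriteStream("out.csv", {encoding: "utf8"});
var formatStream = csv
    .createWriteStream({headers: true, rowDelimiter: "\r\n"})
    .on("error", function (err) {
        console.error(err);
    });

formatStream.pipe(writable);
[{a: "a1", b: "b1"}, {a: "a2", b: "b2"}].forEach(function (item) {
    formatStream.write(item);
});
formatStream.write(null); // signal that no more rows are coming
// out.csv ends up containing "a,b\r\na1,b1\r\na2,b2"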

docs/History.html

Lines changed: 6 additions & 1 deletion

@@ -176,9 +176,14 @@
 
 
 
+<h1>v0.4.2</h1>
+<ul>
+<li>Added ability to specify a rowDelimiter when creating a csv.</li>
+<li>Added discardUnmappedColumns option to allow the ignoring of extra data <a href="https://github.com/C2FO/fast-csv/pull/45">#45</a></li>
+</ul>
 <h1>v0.4.1</h1>
 <ul>
-<li>Fixed race condition that occured if you called pause during a flush.</li>
+<li>Fixed race condition that occurred if you called pause during a flush.</li>
 </ul>
 <h1>v0.4.0</h1>
 <ul>

docs/index.html

Lines changed: 13 additions & 11 deletions

@@ -190,6 +190,7 @@ <h3>Parsing</h3>
 <li><code>objectMode=true</code>: Ensure that <code>data</code> events have an object emitted rather than the stringified version set to false to have a stringified buffer.</li>
 <li><code>headers=false</code>: Ste to true if you expect the first line of your <code>CSV</code> to contain headers, alternatly you can specify an array of headers to use.</li>
 <li><code>ignoreEmpty=false</code>: If you wish to ignore empty rows.</li>
+<li><code>discardUnmappedColumns=false</code>: If you want to discard columns that do not map to a header.</li>
 <li><code>delimiter=&#39;,&#39;</code>: If your data uses an alternate delimiter such as <code>;</code> or <code>\t</code>.<ul>
 <li><strong>NOTE</strong> When specifying an alternate <code>delimiter</code> you may only pass in a single character delimiter</li>
 </ul>
@@ -218,7 +219,7 @@ <h3>Parsing</h3>
 
 var csvStream = csv()
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -232,7 +233,7 @@ <h3>Parsing</h3>
 csv
 .fromPath(&quot;my.csv&quot;)
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -246,9 +247,9 @@ <h3>Parsing</h3>
 &#39;a2,b2\n&#39;;
 
 csv
-.fromPath(CSV_STRING, {headers: true})
+.fromString(CSV_STRING, {headers: true})
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -260,7 +261,7 @@ <h3>Parsing</h3>
 csv()
 .fromStream(stream)
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -272,7 +273,7 @@ <h3>Parsing</h3>
 csv()
 .fromStream(stream, {headers : true})
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -284,7 +285,7 @@ <h3>Parsing</h3>
 csv
 .fromStream(stream, {headers : [&quot;firstName&quot;, &quot;lastName&quot;, &quot;address&quot;]})
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -297,7 +298,7 @@ <h3>Parsing</h3>
 csv
 .fromStream(stream, {ignoreEmpty: true})
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -316,7 +317,7 @@ <h3>Validating</h3>
 //do something with invalid row
 })
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -332,7 +333,7 @@ <h3>Transforming</h3>
 return data.reverse(); //reverse each row.
 })
 .on(&quot;record&quot;, function(data){
-console.log(data):
+console.log(data);
 })
 .on(&quot;end&quot;, function(){
 console.log(&quot;done&quot;);
@@ -342,6 +343,7 @@ <h3>Formatting</h3>
 <p>Formatting accepts the same options as parsing with an additional <code>transform</code> option.</p>
 <ul>
 <li><code>transform(row)</code>: A function that accepts a row and returns a transformed one to be written.</li>
+<li><code>rowDelimiter=&#39;\n&#39;</code>: Specify an alternate row delimiter (i.e <code>\r\n</code>)</li>
 </ul>
 <p><strong><code>createWriteStream(options)</code></strong></p>
 <p>This is the lowest level of the write methods, it creates a stream that can be used to create a csv of unknown size and pipe to an output csv.</p>
@@ -530,7 +532,7 @@ <h2>Piping from Parser to Writer</h2>
 emailAddress: obj.Email_Address,
 verified: obj.Verified
 };
-});
+});
 csv
 .fromPath(&quot;in.csv&quot;, {headers: true})
 .pipe(formatStream)
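One of the fixes above swaps .fromPath for .fromString when parsing an in-memory string. A hedged sketch of the corrected usage; the full CSV_STRING value is assumed, since only its tail is visible in the hunk:

var csv = require("fast-csv");

var CSV_STRING = 'a,b\n' +
    'a1,b1\n' +
    'a2,b2\n'; // assumed sample data

csv
    .fromString(CSV_STRING, {headers: true})
    .on("record", function (data) {
        console.log(data); // e.g. {a: 'a1', b: 'b1'}
    })
    .on("end", function () {
        console.log("done");
    });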

lib/formatter.js

Lines changed: 4 additions & 2 deletions

@@ -52,6 +52,7 @@ function __write(writer, arr, options) {
 var formatter = createFormatter(options),
 transformer = extended.has(options, "transform") ? options.transform : defaultTransform,
 hasHeaders = extended.has(options, "headers") ? options.headers : true,
+rowDelimiter = options.rowDelimiter || LINE_BREAK,
 headersLength = 0,
 i = -1,
 j = -1,
@@ -87,7 +88,7 @@ function __write(writer, arr, options) {
 }
 ret.push(formatter(vals));
 }
-writer.push(ret.join(LINE_BREAK));
+writer.push(ret.join(rowDelimiter));
 }
 }
 
@@ -97,6 +98,7 @@ function CsvTransformStream(options) {
 options = options || {};
 Transform.call(this, options);
 this.formatter = createFormatter(options);
+this.rowDelimiter = options.rowDelimiter || "\n";
 var hasHeaders = this.hasHeaders = extended.has(options, "headers") ? options.headers : true;
 this.parsedHeaders = hasHeaders ? false : true;
 this.buffer = [];
@@ -134,7 +136,7 @@ extended(CsvTransformStream).extend({
 }
 }
 if (this.totalCount++) {
-buffer.push("\n");
+buffer.push(this.rowDelimiter);
 }
 if (isHash) {
 var i = -1, headersLength = this.headersLength;
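Both code paths fall back to the old behaviour when no rowDelimiter is supplied (LINE_BREAK in __write, "\n" in CsvTransformStream), so existing callers are unaffected. A small illustrative sketch of the observable difference; the expected strings follow the tests added below rather than any new guarantee:

var csv = require("fast-csv");

var rows = [
    {a: "a1", b: "b1"},
    {a: "a2", b: "b2"}
];

// No rowDelimiter: rows are still joined with the default "\n".
var unix = csv.writeToString(rows, {headers: true});
// unix === "a,b\na1,b1\na2,b2"

// Explicit rowDelimiter: the value is threaded through to ret.join(...) and
// buffer.push(...) above.
var dos = csv.writeToString(rows, {headers: true, rowDelimiter: "\r\n"});
// dos === "a,b\r\na1,b1\r\na2,b2"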

package.json

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 {
 "name": "fast-csv",
-"version": "0.4.1",
+"version": "0.4.2",
 "description": "CSV parser and writer",
 "main": "index.js",
 "scripts": {

test/fast-csv.test.js

Lines changed: 92 additions & 33 deletions

@@ -754,6 +754,16 @@ it.describe("fast-csv", function (it) {
 }
 }), "A,B\na1,b1\na2,b2");
 });
+
+it.should("support specifying an alternate row delimiter", function () {
+assert.equal(csv.writeToString([
+{a: "a1", b: "b1"},
+{a: "a2", b: "b2"}
+], {
+headers: true,
+rowDelimiter: '\r\n'
+}), "a,b\r\na1,b1\r\na2,b2");
+});
 });
 
 it.describe(".write", function (it) {
@@ -824,6 +834,18 @@ it.describe("fast-csv", function (it) {
 }
 }).on("error", next).pipe(ws);
 });
+
+it.should("support specifying an alternate row delimiter", function (next) {
+var ws = new stream.Writable();
+ws._write = function (data) {
+assert.deepEqual(data.toString(), "a,b\r\na1,b1\r\na2,b2");
+next();
+};
+csv.write([
+{a: "a1", b: "b1"},
+{a: "a2", b: "b2"}
+], {headers: true, rowDelimiter: '\r\n'}).on("error", next).pipe(ws);
+});
 });
 
 it.describe(".writeToPath", function (it) {
@@ -902,6 +924,20 @@ it.describe("fast-csv", function (it) {
 next();
 });
 });
+
+it.should("support specifying an alternate row delimiter", function (next) {
+csv
+.writeToPath(path.resolve(__dirname, "assets/test.csv"), [
+{a: "a1", b: "b1"},
+{a: "a2", b: "b2"}
+], {headers: true, rowDelimiter: '\r\n'})
+.on("error", next)
+.on("finish", function () {
+assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\r\na1,b1\r\na2,b2");
+fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
+next();
+});
+});
 });
 
 it.describe(".createWriteStream", function (it) {
@@ -976,49 +1012,72 @@ it.describe("fast-csv", function (it) {
 });
 stream.write(null);
 });
-});
 
-it.describe("piping from parser to formatter", function (it) {
-
-it.should("allow piping from a parser to a formatter", function (next) {
+it.should("support specifying an alternate row delimiter", function (next) {
 var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
-csv
-.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true, objectMode: true})
-.on("error", next)
-.pipe(csv.createWriteStream({headers: true}))
-.on("error", next)
-.pipe(writable)
+var stream = csv
+.createWriteStream({headers: true, rowDelimiter: '\r\n'})
 .on("error", next);
-
 writable
 .on("finish", function () {
-assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1,b1\na2,b2");
+assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\r\na1,b1\r\na2,b2");
 fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
 next();
 });
+stream.pipe(writable);
+var vals = [
+{a: "a1", b: "b1"},
+{a: "a2", b: "b2"}
+];
+vals.forEach(function (item) {
+stream.write(item);
+});
+stream.write(null);
 });
 
-it.should("preserve transforms", function (next) {
-var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
-csv
-.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true})
-.transform(function (obj) {
-obj.a = obj.a + "-parsed";
-obj.b = obj.b + "-parsed";
-return obj;
-})
-.on("error", next)
-.pipe(csv.createWriteStream({headers: true}))
-.on("error", next)
-.pipe(writable)
-.on("error", next);
 
-writable
-.on("finish", function () {
-assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1-parsed,b1-parsed\na2-parsed,b2-parsed");
-fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
-next();
-});
+it.describe("piping from parser to formatter", function (it) {
+
+it.should("allow piping from a parser to a formatter", function (next) {
+var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
+csv
+.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true, objectMode: true})
+.on("error", next)
+.pipe(csv.createWriteStream({headers: true}))
+.on("error", next)
+.pipe(writable)
+.on("error", next);
+
+writable
+.on("finish", function () {
+assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1,b1\na2,b2");
+fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
+next();
+});
+});
+
+it.should("preserve transforms", function (next) {
+var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
+csv
+.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true})
+.transform(function (obj) {
+obj.a = obj.a + "-parsed";
+obj.b = obj.b + "-parsed";
+return obj;
+})
+.on("error", next)
+.pipe(csv.createWriteStream({headers: true}))
+.on("error", next)
+.pipe(writable)
+.on("error", next);
+
+writable
+.on("finish", function () {
+assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1-parsed,b1-parsed\na2-parsed,b2-parsed");
+fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
+next();
+});
+});
 });
 
-});
+});
