source: Dev/trunk/src/node_modules/ya-csv/README.md @ 519

Last change on this file since 519 was 489, checked in by hendrikvanantwerpen, 11 years ago

Update node modules

File size: 3.1 KB
Line 
1# ya-csv
2
3Event based CSV parser and writer for Node.js suitable for processing large CSV streams.
4
5    // A simple echo program:
6    var csv = require('ya-csv');
7
8    var reader = csv.createCsvStreamReader(process.openStdin());
9    var writer = csv.createCsvStreamWriter(process.stdout);
10
11    reader.addListener('data', function(data) {
12        writer.writeRecord(data);
13    });
14
15## Installation
16
17    npm install ya-csv
18
19Current version requires at least Node.js v0.2.3 and it's tested with Node.js v0.4.12, 0.6.11, 0.7.5 and v0.10.24. Hope it works with the other versions in between too.
20
21## Features
22
23 - event based, suitable for processing big CSV streams
24 - configurable separator, quote and escape characters (comma, double-quote and double-quote by default)
25 - ignores lines starting with configurable comment character (off by default)
26 - supports memory-only streaming
27
28## More examples
29
30Echo first column of the `data.csv` file:
31
32    // equivalent of csv.createCsvFileReader('data.csv')
33    var reader = csv.createCsvFileReader('data.csv', {
34        'separator': ',',
35        'quote': '"',
36        'escape': '"',
37        'comment': '',
38    });
39    var writer = new csv.CsvWriter(process.stdout);
40    reader.addListener('data', function(data) {
41        writer.writeRecord([ data[0] ]);
42    });
43
44Return data in objects rather than arrays: either by grabbing the column names from the header row (first row is not passed to the `data` listener):
45
46    var reader = csv.createCsvFileReader('data.csv', { columnsFromHeader: true });
47    reader.addListener('data', function(data) {
48        // assuming the source file has columns named col1 and col2
49        sys.puts(data.col1 + " ... " + data.col2);
50    });
51
52... or by providing column names from the client code (first row is passed to the `data` listener in this case):
53
54    var reader = csv.createCsvFileReader('data.csv');
55    reader.setColumnNames([ 'col1', 'col2' ]);
56    reader.addListener('data', function(data) {
57        sys.puts(data.col1 + " ... " + data.col2);
58    });
59
60Note `reader.setColumnNames()` resets the column names so next invocation of the `data` listener will again receive the data in an array rather than an object.
61
62Convert the `/etc/passwd` file to comma separated format, drop commented lines and dump the results to the standard output:
63
64    var reader = csv.createCsvFileReader('/etc/passwd', {
65        'separator': ':',
66        'quote': '"',
67        'escape': '"',
68        'comment': '#',
69    });
70    var writer = new csv.CsvWriter(process.stdout);
71    reader.addListener('data', function(data) {
72        writer.writeRecord(data);
73    });
74
75Parsing an upload as the data comes in, using node-formidable:
76
77    upload_form.onPart = function(part) {
78        if (!part.filename) { upload_form.handlePart(part); return; }
79
80        var reader = csv.createCsvFileReader({'comment': '#'});
81        reader.addListener('data', function(data) {
82            saveRecord(data);
83        });
84
85        part.on('data', function(buffer) {
86            // Pipe incoming data into the reader.
87            reader.parse(buffer);
88        });
89        part.on('end', function() {
90            reader.end();
91        });
92    }
Note: See TracBrowser for help on using the repository browser.