Changeset 489 for Dev/trunk/src/node_modules/ya-csv/package.json
- Timestamp: 03/08/14 11:41:10 (11 years ago)
- File: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
Dev/trunk/src/node_modules/ya-csv/package.json
r484 r489 2 2 "name": "ya-csv", 3 3 "description": "CSV parser and generator for Node.js", 4 "version": "0.9. 2",4 "version": "0.9.3", 5 5 "keywords": [ 6 6 "node", … … 37 37 }, 38 38 "main": "lib/ya-csv", 39 "readme": "# ya-csv\n\nEvent based CSV parser and writer for Node.js suitable for processing large CSV streams.\n\n // A simple echo program:\n var csv = require('ya-csv');\n\n var reader = csv.createCsvStreamReader(process.openStdin());\n var writer = csv.createCsvStreamWriter(process.stdout);\n\n reader.addListener('data', function(data) {\n writer.writeRecord(data);\n });\n\n## Installation\n\n npm install ya-csv\n\nCurrent version requires at least Node.js v0.2.3 and it's tested with Node.js v0.4.12, 0.6.11 and 0.7.5. Hope it works with the other versions in between too.\n\n## Features\n\n - event based, suitable for processing big CSV streams\n - configurable separator, quote and escape characters (comma, double-quote and double-quote by default)\n - ignores lines starting with configurable comment character (off by default)\n - supports memory-only streaming\n\n## More examples\n\nEcho first column of the `data.csv` file:\n\n // equivalent of csv.createCsvFileReader('data.csv') \n var reader = csv.createCsvFileReader('data.csv', {\n 'separator': ',',\n 'quote': '\"',\n 'escape': '\"', \n 'comment': '',\n });\n var writer = new csv.CsvWriter(process.stdout);\n reader.addListener('data', function(data) {\n writer.writeRecord([ data[0] ]);\n });\n\nReturn data in objects rather than arrays: either by grabbing the column names form the header row (first row is not passed to the `data` listener):\n\n var reader = csv.createCsvFileReader('data.csv', { columnsFromHeader: true });\n reader.addListener('data', function(data) {\n // supposing there are so named columns in the source file\n sys.puts(data.col1 + \" ... \" + data.col2);\n });\n\n... 
or by providing column names from the client code (first row is passed to the `data` listener in this case):\n\n var reader = csv.createCsvFileReader('data.csv');\n reader.setColumnNames([ 'col1', 'col2' ]);\n reader.addListener('data', function(data) {\n sys.puts(data.col1 + \" ... \" + data.col2);\n });\n\nNote `reader.setColumnNames()` resets the column names so next invocation of the `data` listener will again receive the data in an array rather than an object.\n\nConvert the `/etc/passwd` file to comma separated format, drop commented lines and dump the results to the standard output:\n\n var reader = csv.createCsvFileReader('/etc/passwd', {\n 'separator': ':',\n 'quote': '\"',\n 'escape': '\"',\n 'comment': '#',\n });\n var writer = new csv.CsvWriter(process.stdout);\n reader.addListener('data', function(data) {\n writer.writeRecord(data);\n });\n\nParsing an upload as the data comes in, using node-formidable:\n\n upload_form.onPart = function(part) {\n if (!part.filename) { upload_form.handlePart(part); return }\n\n var reader = csv.createCsvFileReader({'comment': '#'});\n reader.addListener('data', function(data) {\n saveRecord(data);\n });\n\n part.on('data', function(buffer) {\n // Pipe incoming data into the reader.\n reader.parse(buffer);\n });\n part.on('end', function() {\n reader.end()\n }\n }\n",39 "readme": "# ya-csv\n\nEvent based CSV parser and writer for Node.js suitable for processing large CSV streams.\n\n // A simple echo program:\n var csv = require('ya-csv');\n\n var reader = csv.createCsvStreamReader(process.openStdin());\n var writer = csv.createCsvStreamWriter(process.stdout);\n\n reader.addListener('data', function(data) {\n writer.writeRecord(data);\n });\n\n## Installation\n\n npm install ya-csv\n\nCurrent version requires at least Node.js v0.2.3 and it's tested with Node.js v0.4.12, 0.6.11, 0.7.5 and v0.10.24. 
Hope it works with the other versions in between too.\n\n## Features\n\n - event based, suitable for processing big CSV streams\n - configurable separator, quote and escape characters (comma, double-quote and double-quote by default)\n - ignores lines starting with configurable comment character (off by default)\n - supports memory-only streaming\n\n## More examples\n\nEcho first column of the `data.csv` file:\n\n // equivalent of csv.createCsvFileReader('data.csv') \n var reader = csv.createCsvFileReader('data.csv', {\n 'separator': ',',\n 'quote': '\"',\n 'escape': '\"', \n 'comment': '',\n });\n var writer = new csv.CsvWriter(process.stdout);\n reader.addListener('data', function(data) {\n writer.writeRecord([ data[0] ]);\n });\n\nReturn data in objects rather than arrays: either by grabbing the column names form the header row (first row is not passed to the `data` listener):\n\n var reader = csv.createCsvFileReader('data.csv', { columnsFromHeader: true });\n reader.addListener('data', function(data) {\n // supposing there are so named columns in the source file\n sys.puts(data.col1 + \" ... \" + data.col2);\n });\n\n... or by providing column names from the client code (first row is passed to the `data` listener in this case):\n\n var reader = csv.createCsvFileReader('data.csv');\n reader.setColumnNames([ 'col1', 'col2' ]);\n reader.addListener('data', function(data) {\n sys.puts(data.col1 + \" ... 
\" + data.col2);\n });\n\nNote `reader.setColumnNames()` resets the column names so next invocation of the `data` listener will again receive the data in an array rather than an object.\n\nConvert the `/etc/passwd` file to comma separated format, drop commented lines and dump the results to the standard output:\n\n var reader = csv.createCsvFileReader('/etc/passwd', {\n 'separator': ':',\n 'quote': '\"',\n 'escape': '\"',\n 'comment': '#',\n });\n var writer = new csv.CsvWriter(process.stdout);\n reader.addListener('data', function(data) {\n writer.writeRecord(data);\n });\n\nParsing an upload as the data comes in, using node-formidable:\n\n upload_form.onPart = function(part) {\n if (!part.filename) { upload_form.handlePart(part); return }\n\n var reader = csv.createCsvFileReader({'comment': '#'});\n reader.addListener('data', function(data) {\n saveRecord(data);\n });\n\n part.on('data', function(buffer) {\n // Pipe incoming data into the reader.\n reader.parse(buffer);\n });\n part.on('end', function() {\n reader.end()\n }\n }\n", 40 40 "readmeFilename": "README.md", 41 "_id": "ya-csv@0.9.2", 41 "homepage": "https://github.com/koles/ya-csv", 42 "_id": "ya-csv@0.9.3", 42 43 "dist": { 43 "shasum": " 088ddf523ca066bbe654146100cf70030d9f6779"44 "shasum": "e3b1e84b699c569949b8b68a319a1b065ac456b3" 44 45 }, 45 "_from": "ya-csv@ ",46 "_resolved": "https://registry.npmjs.org/ya-csv/-/ya-csv-0.9. 2.tgz"46 "_from": "ya-csv@0.9.3", 47 "_resolved": "https://registry.npmjs.org/ya-csv/-/ya-csv-0.9.3.tgz" 47 48 }
Note: See TracChangeset for help on using the changeset viewer.