---
title: "@std/csv"
description: "Reading and writing of comma-separated values (CSV) files"
jsr: jsr:@std/csv
pkg: csv
version: 1.0.6
generated: true
stability: stable
---
<!-- Autogenerated from JSR docs. Do not edit directly. -->

## Overview

<p>Reads and writes comma-separated values (CSV) files.</p>
<h2 id="parsing-csv">
Parsing CSV</h2>

```js
import { parse } from "@std/csv/parse";
import { assertEquals } from "@std/assert";

const string = "a,b,c\nd,e,f";

// Parse as array of arrays (default)
assertEquals(parse(string, { skipFirstRow: false }), [["a", "b", "c"], ["d", "e", "f"]]);

// Parse csv file with headers into array of objects
assertEquals(parse(string, { skipFirstRow: true }), [{ a: "d", b: "e", c: "f" }]);

// Parse with custom column names
assertEquals(parse(string, { columns: ["x", "y", "z"] }), [
  { x: "a", y: "b", z: "c" },
  { x: "d", y: "e", z: "f" }
]);

// Parse tab-separated values
const tsvString = "name\tage\tcity\njohn\t30\tnew york\nmary\t25\tlos angeles";
assertEquals(parse(tsvString, { separator: "\t", skipFirstRow: true }), [
  { name: "john", age: "30", city: "new york" },
  { name: "mary", age: "25", city: "los angeles" }
]);

// Parse a CSV file which has comments
const csvWithComments = "# This is a comment\nname,age,city\n# Another comment\njohn,30,new york\nmary,25,los angeles";
assertEquals(parse(csvWithComments, { comment: "#", skipFirstRow: true }), [
  { name: "john", age: "30", city: "new york" },
  { name: "mary", age: "25", city: "los angeles" }
]);
```

<h2 id="parsing-csv-from-a-stream">
Parsing CSV from a Stream</h2>

```js
import { CsvParseStream } from "@std/csv/parse-stream";
import { assertEquals } from "@std/assert";

// Parse from a stream (useful for large files)
const source = ReadableStream.from([
  "name,age,city\n",
  "john,30,new york\n",
  "mary,25,los angeles\n"
]);

const csvStream = source
  .pipeThrough(new CsvParseStream({ skipFirstRow: true }));

const records = await Array.fromAsync(csvStream);
assertEquals(records, [
  { name: "john", age: "30", city: "new york" },
  { name: "mary", age: "25", city: "los angeles" }
]);

// Or process records one by one
// for await (const record of csvStream) {
//   console.log(record);
// }
```

<h2 id="stringifying-data-to-csv">
Stringifying Data to CSV</h2>

```js
import { stringify } from "@std/csv/stringify";
import { assertEquals } from "@std/assert";

// Convert array of arrays to CSV
const arrayData = [["name", "age", "city"], ["john", "30", "new york"], ["mary", "25", "los angeles"]];
const csvString = stringify(arrayData);
assertEquals(csvString, "name,age,city\r\njohn,30,new york\r\nmary,25,los angeles\r\n");

// Convert array of objects to CSV
const objectData = [
  { name: "john", age: "30", city: "new york" },
  { name: "mary", age: "25", city: "los angeles" }
];

// When using an array of objects, you must specify columns to use
const customColumns = stringify(objectData, { columns: ["city", "name", "age"] });
assertEquals(customColumns, "city,name,age\r\nnew york,john,30\r\nlos angeles,mary,25\r\n");
```

<h2 id="streaming-stringify-data-to-csv">
Streaming Stringify Data to CSV</h2>

```ts
import { CsvStringifyStream } from "@std/csv/stringify-stream";
import { assertEquals } from "@std/assert";

async function writeCsvToTempFile(): Promise<string> {
  const path = await Deno.makeTempFile();
  using file = await Deno.open(path, { write: true });

  const readable = ReadableStream.from([
    { id: 1, name: "one" },
    { id: 2, name: "two" },
    { id: 3, name: "three" },
  ]);

  await readable
    .pipeThrough(new CsvStringifyStream({ columns: ["id", "name"] }))
    .pipeThrough(new TextEncoderStream())
    .pipeTo(file.writable);

  return path;
}

const path = await writeCsvToTempFile();
const content = await Deno.readTextFile(path);
assertEquals(content, "id,name\r\n1,one\r\n2,two\r\n3,three\r\n");
```

<h2 id="csv-format-information">
CSV Format Information</h2>
<p>There are many kinds of CSV files; this module supports the format described
in <a href="https://www.rfc-editor.org/rfc/rfc4180.html" rel="nofollow">RFC 4180</a>.</p>
<p>A CSV file contains zero or more records of one or more fields per record.
Each record is separated by the newline character. The final record may
optionally be followed by a newline character.</p>

```csv
field1,field2,field3
```

<p>White space is considered part of a field.</p>
<p>Carriage returns before newline characters are silently removed.</p>
<p>Blank lines are ignored. A line with only whitespace characters (excluding
the ending newline character) is not considered a blank line.</p>
<p>Fields which start and stop with the quote character " are called
quoted-fields. The beginning and ending quote are not part of the field.</p>
<p>The source:</p>

```csv
normal string,"quoted-field"
```

<p>results in the fields</p>

```js
[`normal string`, `quoted-field`]
```

<p>Within a quoted-field a quote character followed by a second quote character is considered a single quote.</p>

```csv
"the ""word"" is true","a ""quoted-field"""
```

<p>results in</p>

```js
[`the "word" is true`, `a "quoted-field"`]
```

<p>Newlines and commas may be included in a quoted-field</p>

```csv
"Multi-line
field","comma is ,"
```

<p>results in</p>

```js
[`Multi-line
field`, `comma is ,`]
```

### Add to your project

```sh
deno add jsr:@std/csv
```

<a href="https://jsr.io/@std/csv/doc" class="docs-cta jsr-cta">See all symbols in @std/csv on
<svg class="inline ml-1" viewBox="0 0 13 7" aria-hidden="true" height="20"><path d="M0,2h2v-2h7v1h4v4h-2v2h-7v-1h-4" fill="#083344"></path><g fill="#f7df1e"><path d="M1,3h1v1h1v-3h1v4h-3"></path><path d="M5,1h3v1h-2v1h2v3h-3v-1h2v-1h-2"></path><path d="M9,2h3v2h-1v-1h-1v3h-1"></path></g></svg></a>

<!-- custom:start -->
<!-- Add persistent custom content below. This section is preserved across generations. -->

<!-- custom:end -->
