Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
102 changes: 80 additions & 22 deletions docs/content/docs/3.files/4.csv.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,15 @@ import { defineCollection, defineContentConfig, z } from '@nuxt/content'

export default defineContentConfig({
collections: {
authors: defineCollection({
charts: defineCollection({
type: 'data',
source: 'authors/**.csv',
source: 'charts/**.csv',
schema: z.object({
name: z.string(),
email: z.string(),
avatar: z.string()
// The body field is required in CSV files; without it you cannot access the data array
body: z.array(z.object({
label: z.string(),
value: z.number()
}))
})
})
}
Expand All @@ -29,17 +31,18 @@ export default defineContentConfig({
Create chart files in the `content/charts/` directory.

::code-group
```csv [users.csv]
id,name,email
1,John Doe,[email protected]
2,Jane Smith,[email protected]
3,Alice Johnson,[email protected]
```csv [content/charts/chart1.csv]
label,value
A,100
B,200
C,300
```

```csv [team.csv]
name,role,avatar
John Doe,Developer,https://avatars.githubusercontent.com/u/1?v=4
Jane Smith,Designer,https://avatars.githubusercontent.com/u/2?v=4
```csv [content/charts/chart2.csv]
label,value
Foo,123
Bar,456
Baz,789
```
::

Expand All @@ -53,25 +56,25 @@ Now we can query charts:

```vue
<script lang="ts" setup>
// Find a single author
const { data: author } = await useAsyncData('john-doe', () => {
return queryCollection('authors')
.where('name', '=', 'John Doe')
// Find a single chart
const { data: chart } = await useAsyncData('chart1', () => {
return queryCollection('charts')
.where('id', '=', 'charts/charts/chart1.csv')
.first()
})

// Get all charts
const { data: charts } = await useAsyncData('charts', () => {
return queryCollection('charts')
.order('name', 'ASC')
.order('id', 'ASC')
.all()
})
</script>

<template>
<ul>
<li v-for="author in authors" :key="author.id">
{{ author.name }} ({{ author.email }})
<li v-for="chart in charts" :key="chart.id">
<!-- get data from chart.body -->
</li>
</ul>
</template>
Expand Down Expand Up @@ -139,4 +142,59 @@ id;name;email

::note
The CSV parser can be disabled by setting `csv: false` in the configuration if you don't need CSV support.
::
::

## Single file source

When you point a collection to a single CSV file (instead of a glob), Nuxt Content treats each data row as a separate item in the collection.

- **Define the collection**: set `source` to the path of a single `.csv` file.
- **Item generation**: each data row becomes an item with the row’s fields at the top level (no `body` array).
- **IDs**: item IDs are suffixed with `#<rowNumber>`, where `#1` is the first data row after the header.

```ts [content.config.ts]
import { defineCollection, defineContentConfig } from '@nuxt/content'
import { z } from 'zod'

export default defineContentConfig({
collections: {
people: defineCollection({
type: 'data',
source: 'org/people.csv',
schema: z.object({
name: z.string(),
email: z.string().email()
})
})
}
})
```

```csv [content/org/people.csv]
name,email
Alice,alice@example.com
Bob,bob@example.com
```

Each row produces its own item. For example, the first data row will have an ID ending with `#1` and the second with `#2`. You can query by any column:

```ts
const { data: alice } = await useAsyncData('alice', () =>
  queryCollection('people')
    .where('email', '=', 'alice@example.com')
    .first()
)

const { data: allPeople } = await useAsyncData('all-people', () =>
queryCollection('people')
.order('name', 'ASC')
.all()
)
```

::note
- The header row is required and is not turned into an item.
- With a single-file source, items contain row fields at the top level (no `body`).
- If you prefer treating each CSV file as a single item containing all rows in `body`, use a glob source like `org/**.csv` instead of a single file.
::

15 changes: 15 additions & 0 deletions playground/content.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,21 @@ const pages = defineCollection({
})

const collections = {
people: defineCollection({
type: 'data',
source: 'org/people.csv',
schema: z.object({
name: z.string(),
email: z.string().email(),
}),
}),
org: defineCollection({
type: 'data',
source: 'org/**.csv',
schema: z.object({
body: z.array(z.any()),
}),
}),
hackernews,
content,
data,
Expand Down
11 changes: 11 additions & 0 deletions playground/content/org/people.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
name,email
John Doe,[email protected]
Jane Smith,[email protected]
Bob Johnson,[email protected]
Alice Brown,[email protected]
Charlie Wilson,[email protected]
Diana Lee,[email protected]
Eve Davis,[email protected]
Frank Miller,[email protected]
Grace Taylor,[email protected]
Henry Anderson,[email protected]
10 changes: 10 additions & 0 deletions playground/pages/org/data.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<script setup lang="ts">
const { data } = await useAsyncData('tmp-content', () => queryCollection('org').all())
</script>

<template>
<div>
<h1>People</h1>
<pre>{{ data }}</pre>
</div>
</template>
10 changes: 10 additions & 0 deletions playground/pages/org/people.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<script setup lang="ts">
const { data: tmpContent } = await useAsyncData('tmp-content', () => queryCollection('people').all())
</script>

<template>
<div>
<h1>People</h1>
<pre>{{ tmpContent }}</pre>
</div>
</template>
4 changes: 3 additions & 1 deletion src/utils/content/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ async function _getHighlightPlugin(key: string, options: HighlighterOptions) {
export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt) {
const nuxtOptions = nuxt?.options as unknown as { content: ModuleOptions, mdc: MDCModuleOptions }
const mdcOptions = nuxtOptions?.mdc || {}
const { pathMeta = {}, markdown = {}, transformers = [] } = nuxtOptions?.content?.build || {}
const { pathMeta = {}, markdown = {}, transformers = [], csv = {}, yaml = {} } = nuxtOptions?.content?.build || {}

const rehypeHighlightPlugin = markdown.highlight !== false
? await getHighlightPluginInstance(defu(markdown.highlight as HighlighterOptions, mdcOptions.highlight, { compress: true }))
Expand Down Expand Up @@ -149,6 +149,8 @@ export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt)
},
highlight: undefined,
},
csv: csv,
yaml: yaml,
}

return async function parse(file: ContentFile) {
Expand Down
7 changes: 7 additions & 0 deletions src/utils/content/transformers/csv/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,13 @@ export default defineTransformer({
})
const { result } = await stream.process(file.body)

if (Array.isArray(result) && result.length === 1) {
return {
id: file.id,
...result[0],
}
}

return {
id: file.id,
body: result,
Expand Down
2 changes: 1 addition & 1 deletion src/utils/schema/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ export function detectSchemaVendor(schema: ContentStandardSchemaV1) {
}

export function replaceComponentSchemas<T = Draft07Definition | Draft07DefinitionProperty>(property: T): T {
if ((property as Draft07DefinitionProperty).type === 'array') {
if ((property as Draft07DefinitionProperty).type === 'array' && (property as Draft07DefinitionProperty).items) {
(property as Draft07DefinitionProperty).items = replaceComponentSchemas((property as Draft07DefinitionProperty).items as Draft07DefinitionProperty) as Draft07DefinitionProperty
}

Expand Down
61 changes: 61 additions & 0 deletions src/utils/source.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { readFile } from 'node:fs/promises'
import { createReadStream } from 'node:fs'
import { join, normalize } from 'pathe'
import { withLeadingSlash, withoutTrailingSlash } from 'ufo'
import { glob } from 'tinyglobby'
Expand All @@ -19,6 +20,12 @@ export function defineLocalSource(source: CollectionSource | ResolvedCollectionS
logger.warn('Collection source should not start with `./` or `../`.')
source.include = source.include.replace(/^(\.\/|\.\.\/|\/)*/, '')
}

// If source is a CSV file, define a CSV source
if (source.include.endsWith('.csv') && !source.include.includes('*')) {
return defineCSVSource(source)
}

const { fixed } = parseSourceBase(source)
const resolvedSource: ResolvedCollectionSource = {
_resolved: true,
Expand Down Expand Up @@ -105,6 +112,60 @@ export function defineBitbucketSource(
return resolvedSource
}

/**
 * Resolve a collection source that points at CSV content.
 *
 * When the source matches exactly one CSV file, each data row is exposed as its
 * own item with a key of the form `file.csv#<rowNumber>` (`#1` is the first row
 * after the header). Otherwise, keys are the matched file paths and each file is
 * a single item.
 *
 * @param source - the collection source to resolve
 * @returns the resolved source with `prepare`, `getKeys` and `getItem` hooks
 */
export function defineCSVSource(source: CollectionSource): ResolvedCollectionSource {
  const { fixed } = parseSourceBase(source)

  const resolvedSource: ResolvedCollectionSource = {
    _resolved: true,
    prefix: withoutTrailingSlash(withLeadingSlash(fixed)),
    prepare: async ({ rootDir }) => {
      // `~~/` is expanded to the project root; default to `<rootDir>/content`.
      resolvedSource.cwd = source.cwd
        ? String(normalize(source.cwd)).replace(/^~~\//, rootDir)
        : join(rootDir, 'content')
    },
    getKeys: async () => {
      const _keys = await glob(source.include, { cwd: resolvedSource.cwd, ignore: getExcludedSourcePaths(source), dot: true, expandDirectories: false })
        .catch((): [] => [])
      const keys = _keys.map(key => key.substring(fixed.length))
      if (keys.length !== 1) {
        return keys
      }

      // Single-file source: expand into one key per data row by streaming the
      // file and counting newline bytes (0x0A). Row 0 is the header and is
      // never emitted as a key.
      return new Promise((resolve) => {
        const csvKeys: string[] = []
        let rowNumber = 0
        let pendingBytes = false // bytes seen since the last newline
        createReadStream(join(resolvedSource.cwd, fixed, keys[0]!))
          .on('data', (chunk: Buffer) => {
            for (let i = 0; i < chunk.length; i += 1) {
              if (chunk[i] === 10 /* \n */) {
                if (rowNumber > 0) { // row 0 is the CSV header
                  csvKeys.push(`${keys[0]}#${rowNumber}`)
                }
                rowNumber += 1
                pendingBytes = false
              }
              else {
                pendingBytes = true
              }
            }
          })
          .on('end', () => {
            // A final data row without a trailing newline still counts.
            if (pendingBytes && rowNumber > 0) {
              csvKeys.push(`${keys[0]}#${rowNumber}`)
            }
            resolve(csvKeys)
          })
          // Mirror the glob() failure behavior above: resolve with whatever
          // was collected instead of leaving the promise pending forever.
          .on('error', () => resolve(csvKeys))
      })
    },
    getItem: async (key) => {
      const [csvKey, csvIndex] = key.split('#')
      const fullPath = join(resolvedSource.cwd, fixed, csvKey!)
      const content = await readFile(fullPath, 'utf8')

      // Row keys return the header line plus the requested row, so the CSV
      // parser downstream still sees a valid two-line document.
      if (key.includes('#')) {
        const lines = content.split('\n')
        return lines[0] + '\n' + lines[+(csvIndex || 0)]!
      }

      return content
    },
    ...source,
    include: source.include,
    cwd: '',
  }
  return resolvedSource
}

export function parseSourceBase(source: CollectionSource) {
const [fixPart, ...rest] = source.include.includes('*') ? source.include.split('*') : ['', source.include]
return {
Expand Down
17 changes: 13 additions & 4 deletions test/unit/parseContent.csv.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,11 +83,20 @@ describe('Parser (.csv)', async () => {
expect(parsed).toHaveProperty('id')
assert(parsed.id === 'content/index.csv')

expect(parsed).toHaveProperty('body')
expect(Array.isArray(parsed.body)).toBeTruthy()
const truth = await csvToJson({ output: 'json' }).fromString(csv)
// Single line CSV files maps to a single object
if (csv.split('\n').length === 2) {
const truth = (await csvToJson({ output: 'json' }).fromString(csv))[0]
Object.keys(truth).forEach((key) => {
expect(parsed[key] || (parsed.meta as Record<string, unknown>)[key]).toBe(truth[key])
})
}
else {
expect(parsed).toHaveProperty('body')
expect(Array.isArray(parsed.body)).toBeTruthy()
const truth = await csvToJson({ output: 'json' }).fromString(csv)

expect(parsed.body).toMatchObject(truth)
expect(parsed.body).toMatchObject(truth)
}
})
}
})
Loading