diff --git a/docs/content/docs/3.files/4.csv.md b/docs/content/docs/3.files/4.csv.md
index 7b9042dea..d2ba6ea44 100644
--- a/docs/content/docs/3.files/4.csv.md
+++ b/docs/content/docs/3.files/4.csv.md
@@ -10,13 +10,15 @@ import { defineCollection, defineContentConfig, z } from '@nuxt/content'
export default defineContentConfig({
collections: {
- authors: defineCollection({
+ charts: defineCollection({
type: 'data',
- source: 'authors/**.csv',
+ source: 'charts/**.csv',
schema: z.object({
- name: z.string(),
- email: z.string(),
- avatar: z.string()
+ // Body is important in CSV files; without the body field you cannot access the data array
+ body: z.array(z.object({
+ label: z.string(),
+ value: z.number()
+ }))
})
})
}
@@ -29,17 +31,18 @@ export default defineContentConfig({
-Create author files in `content/authors/` directory.
+Create chart files in `content/charts/` directory.
::code-group
-```csv [users.csv]
-id,name,email
-1,John Doe,john@example.com
-2,Jane Smith,jane@example.com
-3,Alice Johnson,alice@example.com
+```csv [content/charts/chart1.csv]
+label,value
+A,100
+B,200
+C,300
```
-```csv [team.csv]
-name,role,avatar
-John Doe,Developer,https://avatars.githubusercontent.com/u/1?v=4
-Jane Smith,Designer,https://avatars.githubusercontent.com/u/2?v=4
+```csv [content/charts/chart2.csv]
+label,value
+Foo,123
+Bar,456
+Baz,789
```
::
@@ -53,25 +56,25 @@ Now we can query authors:
```vue
- -
- {{ author.name }} ({{ author.email }})
+
-
+
@@ -139,4 +142,59 @@ id;name;email
::note
The CSV parser can be disabled by setting `csv: false` in the configuration if you don't need CSV support.
-::
+::
+
+## Single file source
+
+When you point a collection to a single CSV file (instead of a glob), Nuxt Content treats each data row as a separate item in the collection.
+
+- **Define the collection**: set `source` to the path of a single `.csv` file.
+- **Item generation**: each data row becomes an item with the row’s fields at the top level (no `body` array).
+- **IDs**: item IDs are suffixed with `#<rowNumber>`, where `#1` is the first data row after the header.
+
+```ts [content.config.ts]
+import { defineCollection, defineContentConfig } from '@nuxt/content'
+import { z } from 'zod'
+
+export default defineContentConfig({
+ collections: {
+ people: defineCollection({
+ type: 'data',
+ source: 'org/people.csv',
+ schema: z.object({
+ name: z.string(),
+ email: z.string().email()
+ })
+ })
+ }
+})
+```
+
+```csv [content/org/people.csv]
+name,email
+Alice,alice@example.com
+Bob,bob@example.com
+```
+
+Each row produces its own item. For example, the first data row will have an ID ending with `#1` and the second with `#2`. You can query by any column:
+
+```ts
+const { data: alice } = await useAsyncData('alice', () =>
+ queryCollection('people')
+ .where('email', '=', 'alice@example.com')
+ .first()
+)
+
+const { data: allPeople } = await useAsyncData('all-people', () =>
+ queryCollection('people')
+ .order('name', 'ASC')
+ .all()
+)
+```
+
+::note
+- The header row is required and is not turned into an item.
+- With a single-file source, items contain row fields at the top level (no `body`).
+- If you prefer treating each CSV file as a single item containing all rows in `body`, use a glob source like `org/**.csv` instead of a single file.
+::
+
diff --git a/playground/content.config.ts b/playground/content.config.ts
index 396b14243..a8089436e 100644
--- a/playground/content.config.ts
+++ b/playground/content.config.ts
@@ -66,6 +66,21 @@ const pages = defineCollection({
})
const collections = {
+ people: defineCollection({
+ type: 'data',
+ source: 'org/people.csv',
+ schema: z.object({
+ name: z.string(),
+ email: z.string().email(),
+ }),
+ }),
+ org: defineCollection({
+ type: 'data',
+ source: 'org/**.csv',
+ schema: z.object({
+ body: z.array(z.any()),
+ }),
+ }),
hackernews,
content,
data,
diff --git a/playground/content/org/people.csv b/playground/content/org/people.csv
new file mode 100644
index 000000000..0671ad01c
--- /dev/null
+++ b/playground/content/org/people.csv
@@ -0,0 +1,11 @@
+name,email
+John Doe,john.doe@example.com
+Jane Smith,jane.smith@example.com
+Bob Johnson,bob.johnson@example.com
+Alice Brown,alice.brown@example.com
+Charlie Wilson,charlie.wilson@example.com
+Diana Lee,diana.lee@example.com
+Eve Davis,eve.davis@example.com
+Frank Miller,frank.miller@example.com
+Grace Taylor,grace.taylor@example.com
+Henry Anderson,henry.anderson@example.com
diff --git a/playground/pages/org/data.vue b/playground/pages/org/data.vue
new file mode 100644
index 000000000..5ecec16bd
--- /dev/null
+++ b/playground/pages/org/data.vue
@@ -0,0 +1,10 @@
+
+
+
+
+
diff --git a/playground/pages/org/people.vue b/playground/pages/org/people.vue
new file mode 100644
index 000000000..3b456a21e
--- /dev/null
+++ b/playground/pages/org/people.vue
@@ -0,0 +1,10 @@
+
+
+
+
+
People
+
{{ tmpContent }}
+
+
diff --git a/src/utils/content/index.ts b/src/utils/content/index.ts
index 0a7c20197..e5f921c67 100644
--- a/src/utils/content/index.ts
+++ b/src/utils/content/index.ts
@@ -111,7 +111,7 @@ async function _getHighlightPlugin(key: string, options: HighlighterOptions) {
export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt) {
const nuxtOptions = nuxt?.options as unknown as { content: ModuleOptions, mdc: MDCModuleOptions }
const mdcOptions = nuxtOptions?.mdc || {}
- const { pathMeta = {}, markdown = {}, transformers = [] } = nuxtOptions?.content?.build || {}
+ const { pathMeta = {}, markdown = {}, transformers = [], csv = {}, yaml = {} } = nuxtOptions?.content?.build || {}
const rehypeHighlightPlugin = markdown.highlight !== false
? await getHighlightPluginInstance(defu(markdown.highlight as HighlighterOptions, mdcOptions.highlight, { compress: true }))
@@ -149,6 +149,8 @@ export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt)
},
highlight: undefined,
},
+ csv: csv,
+ yaml: yaml,
}
return async function parse(file: ContentFile) {
diff --git a/src/utils/content/transformers/csv/index.ts b/src/utils/content/transformers/csv/index.ts
index 6bc695824..5dda952ef 100644
--- a/src/utils/content/transformers/csv/index.ts
+++ b/src/utils/content/transformers/csv/index.ts
@@ -53,6 +53,13 @@ export default defineTransformer({
})
const { result } = await stream.process(file.body)
+ if (Array.isArray(result) && result.length === 1) {
+ return {
+ id: file.id,
+ ...result[0],
+ }
+ }
+
return {
id: file.id,
body: result,
diff --git a/src/utils/schema/index.ts b/src/utils/schema/index.ts
index d3fe04d2f..d8654d996 100644
--- a/src/utils/schema/index.ts
+++ b/src/utils/schema/index.ts
@@ -106,7 +106,7 @@ export function detectSchemaVendor(schema: ContentStandardSchemaV1) {
}
export function replaceComponentSchemas(property: T): T {
- if ((property as Draft07DefinitionProperty).type === 'array') {
+ if ((property as Draft07DefinitionProperty).type === 'array' && (property as Draft07DefinitionProperty).items) {
(property as Draft07DefinitionProperty).items = replaceComponentSchemas((property as Draft07DefinitionProperty).items as Draft07DefinitionProperty) as Draft07DefinitionProperty
}
diff --git a/src/utils/source.ts b/src/utils/source.ts
index 41c4ec01b..801f27c07 100644
--- a/src/utils/source.ts
+++ b/src/utils/source.ts
@@ -1,4 +1,5 @@
import { readFile } from 'node:fs/promises'
+import { createReadStream } from 'node:fs'
import { join, normalize } from 'pathe'
import { withLeadingSlash, withoutTrailingSlash } from 'ufo'
import { glob } from 'tinyglobby'
@@ -19,6 +20,12 @@ export function defineLocalSource(source: CollectionSource | ResolvedCollectionS
logger.warn('Collection source should not start with `./` or `../`.')
source.include = source.include.replace(/^(\.\/|\.\.\/|\/)*/, '')
}
+
+ // If source is a CSV file, define a CSV source
+ if (source.include.endsWith('.csv') && !source.include.includes('*')) {
+ return defineCSVSource(source)
+ }
+
const { fixed } = parseSourceBase(source)
const resolvedSource: ResolvedCollectionSource = {
_resolved: true,
@@ -105,6 +112,60 @@ export function defineBitbucketSource(
return resolvedSource
}
+export function defineCSVSource(source: CollectionSource): ResolvedCollectionSource {
+ const { fixed } = parseSourceBase(source)
+
+ const resolvedSource: ResolvedCollectionSource = {
+ _resolved: true,
+ prefix: withoutTrailingSlash(withLeadingSlash(fixed)),
+ prepare: async ({ rootDir }) => {
+ resolvedSource.cwd = source.cwd
+ ? String(normalize(source.cwd)).replace(/^~~\//, rootDir)
+ : join(rootDir, 'content')
+ },
+ getKeys: async () => {
+ const _keys = await glob(source.include, { cwd: resolvedSource.cwd, ignore: getExcludedSourcePaths(source), dot: true, expandDirectories: false })
+ .catch((): [] => [])
+ const keys = _keys.map(key => key.substring(fixed.length))
+ if (keys.length !== 1) {
+ return keys
+ }
+
+ return new Promise((resolve) => {
+ const csvKeys: string[] = []
+ let count = 0
+ createReadStream(join(resolvedSource.cwd, fixed, keys[0]!))
+ .on('data', function (chunk) {
+ for (let i = 0; i < chunk.length; i += 1)
+ if (chunk[i] == 10) {
+ if (count > 0) { // count === 0 is CSV header row and should not be included
+ csvKeys.push(`${keys[0]}#${count}`)
+ }
+ count += 1
+ }
+ })
+ .on('end', () => resolve(csvKeys))
+ })
+ },
+ getItem: async (key) => {
+ const [csvKey, csvIndex] = key.split('#')
+ const fullPath = join(resolvedSource.cwd, fixed, csvKey!)
+ const content = await readFile(fullPath, 'utf8')
+
+ if (key.includes('#')) {
+ const lines = content.split('\n')
+ return lines[0] + '\n' + lines[+(csvIndex || 0)]!
+ }
+
+ return content
+ },
+ ...source,
+ include: source.include,
+ cwd: '',
+ }
+ return resolvedSource
+}
+
export function parseSourceBase(source: CollectionSource) {
const [fixPart, ...rest] = source.include.includes('*') ? source.include.split('*') : ['', source.include]
return {
diff --git a/test/unit/parseContent.csv.test.ts b/test/unit/parseContent.csv.test.ts
index 948f3f63d..b827f55aa 100644
--- a/test/unit/parseContent.csv.test.ts
+++ b/test/unit/parseContent.csv.test.ts
@@ -83,11 +83,20 @@ describe('Parser (.csv)', async () => {
expect(parsed).toHaveProperty('id')
assert(parsed.id === 'content/index.csv')
- expect(parsed).toHaveProperty('body')
- expect(Array.isArray(parsed.body)).toBeTruthy()
- const truth = await csvToJson({ output: 'json' }).fromString(csv)
+ // Single line CSV files maps to a single object
+ if (csv.split('\n').length === 2) {
+ const truth = (await csvToJson({ output: 'json' }).fromString(csv))[0]
+ Object.keys(truth).forEach((key) => {
+ expect(parsed[key] || (parsed.meta as Record)[key]).toBe(truth[key])
+ })
+ }
+ else {
+ expect(parsed).toHaveProperty('body')
+ expect(Array.isArray(parsed.body)).toBeTruthy()
+ const truth = await csvToJson({ output: 'json' }).fromString(csv)
- expect(parsed.body).toMatchObject(truth)
+ expect(parsed.body).toMatchObject(truth)
+ }
})
}
})