Skip to content

Commit 8f85da8

Browse files
authored
fix(plugin-import-export): json preview and downloads preserve nesting and exclude disabled fields (#13210)
### What?

Improves both the JSON preview and export functionality in the import-export plugin:

- Preserves proper nesting of object and array fields (e.g., groups, tabs, arrays)
- Excludes any fields explicitly marked as `disabled` via `custom.plugin-import-export`
- Ensures downloaded files use proper JSON formatting when `format` is `json` (no CSV-style flattening)

### Why?

Previously:

- The JSON preview flattened all fields to a single level and included disabled fields.
- Exported files with `format: json` were still CSV-style data encoded as `.json`, rather than real JSON.

### How?

- Refactored `/preview-data` JSON handling to preserve original document shape.
- Applied `removeDisabledFields` to clean nested fields using dot-notation paths.
- Updated `createExport` to skip `flattenObject` for JSON formats, using a nested JSON filter instead.
- Fixed streaming and buffered export paths to output valid JSON arrays when `format` is `json`.
1 parent e48427e commit 8f85da8

File tree

10 files changed

+414
-81
lines changed

10 files changed

+414
-81
lines changed

packages/plugin-import-export/src/components/Preview/index.tsx

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@ export const Preview = () => {
6868
collectionSlug,
6969
draft,
7070
fields,
71+
format,
7172
limit,
7273
locale,
7374
sort,
@@ -115,8 +116,13 @@ export const Preview = () => {
115116

116117
const fieldKeys =
117118
Array.isArray(fields) && fields.length > 0
118-
? selectedKeys // strictly only what was selected
119-
: [...selectedKeys, ...defaultMetaFields.filter((key) => allKeys.includes(key))]
119+
? selectedKeys // strictly use selected fields only
120+
: [
121+
...selectedKeys,
122+
...defaultMetaFields.filter(
123+
(key) => allKeys.includes(key) && !selectedKeys.includes(key),
124+
),
125+
]
120126

121127
// Build columns based on flattened keys
122128
const newColumns: Column[] = fieldKeys.map((key) => ({
@@ -158,6 +164,7 @@ export const Preview = () => {
158164
disabledFieldRegexes,
159165
draft,
160166
fields,
167+
format,
161168
i18n,
162169
limit,
163170
locale,

packages/plugin-import-export/src/export/createExport.ts

Lines changed: 90 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ export const createExport = async (args: CreateExportArgs) => {
114114

115115
const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex)
116116

117-
const filterDisabled = (row: Record<string, unknown>): Record<string, unknown> => {
117+
const filterDisabledCSV = (row: Record<string, unknown>): Record<string, unknown> => {
118118
const filtered: Record<string, unknown> = {}
119119

120120
for (const [key, value] of Object.entries(row)) {
@@ -127,35 +127,62 @@ export const createExport = async (args: CreateExportArgs) => {
127127
return filtered
128128
}
129129

130+
const filterDisabledJSON = (doc: any, parentPath = ''): any => {
131+
if (Array.isArray(doc)) {
132+
return doc.map((item) => filterDisabledJSON(item, parentPath))
133+
}
134+
135+
if (typeof doc !== 'object' || doc === null) {
136+
return doc
137+
}
138+
139+
const filtered: Record<string, any> = {}
140+
for (const [key, value] of Object.entries(doc)) {
141+
const currentPath = parentPath ? `${parentPath}.${key}` : key
142+
143+
// Only remove if this exact path is disabled
144+
const isDisabled = disabledFields.includes(currentPath)
145+
146+
if (!isDisabled) {
147+
filtered[key] = filterDisabledJSON(value, currentPath)
148+
}
149+
}
150+
151+
return filtered
152+
}
153+
130154
if (download) {
131155
if (debug) {
132156
req.payload.logger.debug('Pre-scanning all columns before streaming')
133157
}
134158

135-
const allColumnsSet = new Set<string>()
136159
const allColumns: string[] = []
137-
let scanPage = 1
138-
let hasMore = true
139-
140-
while (hasMore) {
141-
const result = await payload.find({ ...findArgs, page: scanPage })
142-
143-
result.docs.forEach((doc) => {
144-
const flat = filterDisabled(flattenObject({ doc, fields, toCSVFunctions }))
145-
Object.keys(flat).forEach((key) => {
146-
if (!allColumnsSet.has(key)) {
147-
allColumnsSet.add(key)
148-
allColumns.push(key)
149-
}
160+
161+
if (isCSV) {
162+
const allColumnsSet = new Set<string>()
163+
let scanPage = 1
164+
let hasMore = true
165+
166+
while (hasMore) {
167+
const result = await payload.find({ ...findArgs, page: scanPage })
168+
169+
result.docs.forEach((doc) => {
170+
const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions }))
171+
Object.keys(flat).forEach((key) => {
172+
if (!allColumnsSet.has(key)) {
173+
allColumnsSet.add(key)
174+
allColumns.push(key)
175+
}
176+
})
150177
})
151-
})
152178

153-
hasMore = result.hasNextPage
154-
scanPage += 1
155-
}
179+
hasMore = result.hasNextPage
180+
scanPage += 1
181+
}
156182

157-
if (debug) {
158-
req.payload.logger.debug(`Discovered ${allColumns.length} columns`)
183+
if (debug) {
184+
req.payload.logger.debug(`Discovered ${allColumns.length} columns`)
185+
}
159186
}
160187

161188
const encoder = new TextEncoder()
@@ -171,35 +198,58 @@ export const createExport = async (args: CreateExportArgs) => {
171198
}
172199

173200
if (result.docs.length === 0) {
201+
// Close JSON array properly if JSON
202+
if (!isCSV) {
203+
this.push(encoder.encode(']'))
204+
}
174205
this.push(null)
175206
return
176207
}
177208

178-
const batchRows = result.docs.map((doc) =>
179-
filterDisabled(flattenObject({ doc, fields, toCSVFunctions })),
180-
)
181-
182-
const paddedRows = batchRows.map((row) => {
183-
const fullRow: Record<string, unknown> = {}
184-
for (const col of allColumns) {
185-
fullRow[col] = row[col] ?? ''
209+
if (isCSV) {
210+
// --- CSV Streaming ---
211+
const batchRows = result.docs.map((doc) =>
212+
filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })),
213+
)
214+
215+
const paddedRows = batchRows.map((row) => {
216+
const fullRow: Record<string, unknown> = {}
217+
for (const col of allColumns) {
218+
fullRow[col] = row[col] ?? ''
219+
}
220+
return fullRow
221+
})
222+
223+
const csvString = stringify(paddedRows, {
224+
header: isFirstBatch,
225+
columns: allColumns,
226+
})
227+
228+
this.push(encoder.encode(csvString))
229+
} else {
230+
// --- JSON Streaming ---
231+
const batchRows = result.docs.map((doc) => filterDisabledJSON(doc))
232+
233+
// Convert each filtered/flattened row into JSON string
234+
const batchJSON = batchRows.map((row) => JSON.stringify(row)).join(',')
235+
236+
if (isFirstBatch) {
237+
this.push(encoder.encode('[' + batchJSON))
238+
} else {
239+
this.push(encoder.encode(',' + batchJSON))
186240
}
187-
return fullRow
188-
})
189-
190-
const csvString = stringify(paddedRows, {
191-
header: isFirstBatch,
192-
columns: allColumns,
193-
})
241+
}
194242

195-
this.push(encoder.encode(csvString))
196243
isFirstBatch = false
197244
streamPage += 1
198245

199246
if (!result.hasNextPage) {
200247
if (debug) {
201248
req.payload.logger.debug('Stream complete - no more pages')
202249
}
250+
if (!isCSV) {
251+
this.push(encoder.encode(']'))
252+
}
203253
this.push(null) // End the stream
204254
}
205255
},
@@ -239,7 +289,7 @@ export const createExport = async (args: CreateExportArgs) => {
239289

240290
if (isCSV) {
241291
const batchRows = result.docs.map((doc) =>
242-
filterDisabled(flattenObject({ doc, fields, toCSVFunctions })),
292+
filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })),
243293
)
244294

245295
// Track discovered column keys
@@ -254,8 +304,8 @@ export const createExport = async (args: CreateExportArgs) => {
254304

255305
rows.push(...batchRows)
256306
} else {
257-
const jsonInput = result.docs.map((doc) => JSON.stringify(doc))
258-
outputData.push(jsonInput.join(',\n'))
307+
const batchRows = result.docs.map((doc) => filterDisabledJSON(doc))
308+
outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n'))
259309
}
260310

261311
hasNextPage = result.hasNextPage

packages/plugin-import-export/src/index.ts

Lines changed: 48 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@ import { getExportCollection } from './getExportCollection.js'
1313
import { translations } from './translations/index.js'
1414
import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js'
1515
import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js'
16+
import { getValueAtPath } from './utilities/getvalueAtPath.js'
17+
import { removeDisabledFields } from './utilities/removeDisabledFields.js'
18+
import { setNestedValue } from './utilities/setNestedValue.js'
1619

1720
export const importExportPlugin =
1821
(pluginConfig: ImportExportPluginConfig) =>
@@ -91,6 +94,7 @@ export const importExportPlugin =
9194
collectionSlug: string
9295
draft?: 'no' | 'yes'
9396
fields?: string[]
97+
format?: 'csv' | 'json'
9498
limit?: number
9599
locale?: string
96100
sort?: any
@@ -120,29 +124,58 @@ export const importExportPlugin =
120124
where,
121125
})
122126

127+
const isCSV = req?.data?.format === 'csv'
123128
const docs = result.docs
124129

125-
const toCSVFunctions = getCustomFieldFunctions({
126-
fields: collection.config.fields as FlattenedField[],
127-
})
130+
let transformed: Record<string, unknown>[] = []
131+
132+
if (isCSV) {
133+
const toCSVFunctions = getCustomFieldFunctions({
134+
fields: collection.config.fields as FlattenedField[],
135+
})
136+
137+
const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[])
128138

129-
const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[])
139+
transformed = docs.map((doc) => {
140+
const row = flattenObject({
141+
doc,
142+
fields,
143+
toCSVFunctions,
144+
})
130145

131-
const transformed = docs.map((doc) => {
132-
const row = flattenObject({
133-
doc,
134-
fields,
135-
toCSVFunctions,
146+
for (const key of possibleKeys) {
147+
if (!(key in row)) {
148+
row[key] = null
149+
}
150+
}
151+
152+
return row
136153
})
154+
} else {
155+
const disabledFields =
156+
collection.config.admin.custom?.['plugin-import-export']?.disabledFields
157+
158+
transformed = docs.map((doc) => {
159+
let output: Record<string, unknown> = { ...doc }
137160

138-
for (const key of possibleKeys) {
139-
if (!(key in row)) {
140-
row[key] = null
161+
// Remove disabled fields first
162+
output = removeDisabledFields(output, disabledFields)
163+
164+
// Then trim to selected fields only (if fields are provided)
165+
if (Array.isArray(fields) && fields.length > 0) {
166+
const trimmed: Record<string, unknown> = {}
167+
168+
for (const key of fields) {
169+
const value = getValueAtPath(output, key)
170+
setNestedValue(trimmed, key, value ?? null)
171+
}
172+
173+
output = trimmed
141174
}
142-
}
143175

144-
return row
145-
})
176+
return output
177+
})
178+
}
146179

147180
return Response.json({
148181
docs: transformed,

packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts

Lines changed: 18 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -22,21 +22,18 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix
2222
'plugin-import-export' in field.custom &&
2323
field.custom['plugin-import-export']?.toCSV
2424

25-
if (!('name' in field) || typeof field.name !== 'string' || fieldHasToCSVFunction) {
26-
return
27-
}
28-
29-
const name = prefix ? `${prefix}_${field.name}` : field.name
25+
const name = 'name' in field && typeof field.name === 'string' ? field.name : undefined
26+
const fullKey = name && prefix ? `${prefix}_${name}` : (name ?? prefix)
3027

3128
switch (field.type) {
3229
case 'array': {
33-
const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${name}_0`)
30+
const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${fullKey}_0`)
3431
keys.push(...subKeys)
3532
break
3633
}
3734
case 'blocks': {
3835
field.blocks.forEach((block) => {
39-
const blockPrefix = `${name}_0_${block.slug}`
36+
const blockPrefix = `${fullKey}_0_${block.slug}`
4037
keys.push(`${blockPrefix}_blockType`)
4138
keys.push(`${blockPrefix}_id`)
4239
keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix))
@@ -46,45 +43,42 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix
4643
case 'collapsible':
4744
case 'group':
4845
case 'row':
49-
keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], name))
46+
keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], fullKey))
5047
break
5148
case 'relationship':
5249
if (field.hasMany) {
5350
if (Array.isArray(field.relationTo)) {
5451
// hasMany polymorphic
55-
keys.push(`${name}_0_relationTo`, `${name}_0_id`)
52+
keys.push(`${fullKey}_0_relationTo`, `${fullKey}_0_id`)
5653
} else {
5754
// hasMany monomorphic
58-
keys.push(`${name}_0`)
55+
keys.push(`${fullKey}_0`)
5956
}
6057
} else {
6158
if (Array.isArray(field.relationTo)) {
6259
// hasOne polymorphic
63-
keys.push(`${name}_relationTo`, `${name}_id`)
60+
keys.push(`${fullKey}_relationTo`, `${fullKey}_id`)
6461
} else {
6562
// hasOne monomorphic
66-
keys.push(name)
63+
keys.push(fullKey)
6764
}
6865
}
6966
break
7067
case 'tabs':
71-
if (field.tabs) {
72-
field.tabs.forEach((tab) => {
73-
if (tab.name) {
74-
const tabPrefix = prefix ? `${prefix}_${tab.name}` : tab.name
75-
keys.push(...getFlattenedFieldKeys(tab.fields, tabPrefix))
76-
} else {
77-
keys.push(...getFlattenedFieldKeys(tab.fields, prefix))
78-
}
79-
})
80-
}
68+
field.tabs?.forEach((tab) => {
69+
const tabPrefix = tab.name ? `${fullKey}_${tab.name}` : fullKey
70+
keys.push(...getFlattenedFieldKeys(tab.fields || [], tabPrefix))
71+
})
8172
break
8273
default:
74+
if (!name || fieldHasToCSVFunction) {
75+
break
76+
}
8377
if ('hasMany' in field && field.hasMany) {
8478
// Push placeholder for first index
85-
keys.push(`${name}_0`)
79+
keys.push(`${fullKey}_0`)
8680
} else {
87-
keys.push(name)
81+
keys.push(fullKey)
8882
}
8983
break
9084
}

0 commit comments

Comments (0)