Implement CSV export functionality with UTF-8 encoding (BOM for Excel compatibility). CSV is the simplest format and should be implemented first.
Requirements
Dependencies
CSV Strategy
For multiple types, use ZIP archive with separate CSV files:
- More professional for multi-table exports
- Each table in its own CSV file (employes.csv, pieces_jointes.csv, medias.csv)
- User can extract and open individually in Excel
For single type, export directly as CSV.
CSV Exporter Implementation
// src/core/lib/exporters/csv-exporter.ts
import Papa from 'papaparse';
import archiver from 'archiver';
import * as fs from 'fs';
// The three exportable entity kinds; used as discriminator keys throughout this module.
type ExportType = 'employees' | 'attachments' | 'media';
// Export input: any subset of types, each mapped to its fetched records.
// Partial so callers can pass only the types the user selected.
type ExportData = Partial<Record<ExportType, Employee[] | Attachment[] | Media[]>>;
// Column mappings for each type.
// Keys are the French column labels written to the CSV header;
// values are the record fields read for each row. Insertion order
// defines the column order in the output file.
const EMPLOYEE_COLUMNS: Record<string, keyof Employee> = {
'ID': 'id',
'Prénom': 'firstName',
'Nom': 'lastName',
'Email': 'email',
'Téléphone': 'phone',
'Statut': 'status',
"Date d'embauche": 'hireDate',
'Créé le': 'createdAt',
};
// Attachment export columns (label → Attachment field).
const ATTACHMENT_COLUMNS: Record<string, keyof Attachment> = {
'ID': 'id',
'ID Employé': 'employeeId',
'Type': 'entityType',
'Nom du fichier': 'originalName',
'Type MIME': 'mimeType',
'Taille': 'size',
'Créé le': 'createdAt',
};
// Media export columns (label → Media field).
const MEDIA_COLUMNS: Record<string, keyof Media> = {
'ID': 'id',
'Nom': 'name',
'Type': 'type',
'Nom du fichier': 'fileName',
'Type MIME': 'mimeType',
'Créé le': 'createdAt',
};
// File name used for each type's CSV entry inside a ZIP archive.
const FILE_NAMES: Record<ExportType, string> = {
employees: 'employes.csv',
attachments: 'pieces_jointes.csv',
media: 'medias.csv',
};
// Resolve the label→field column mapping for a given export type.
const getColumns = (type: ExportType) => {
  const columnsByType = {
    employees: EMPLOYEE_COLUMNS,
    attachments: ATTACHMENT_COLUMNS,
    media: MEDIA_COLUMNS,
  } as const;
  return columnsByType[type];
};
const createCsvExporter = () => {
// For small exports (< 1000 records), return buffer
const generate = (data: ExportData): Buffer => {
const rows: Record<string, unknown>[] = [];
for (const [type, records] of Object.entries(data)) {
if (!records || records.length === 0) continue;
const columns = getColumns(type as ExportType);
for (const record of records) {
const row: Record<string, unknown> = {};
for (const [label, key] of Object.entries(columns)) {
row[label] = record[key as keyof typeof record] ?? '';
}
rows.push(row);
}
}
const csv = Papa.unparse(rows, {
quotes: true,
quoteChar: '"',
escapeChar: '"',
delimiter: ';',
header: true,
newline: '\r\n',
});
// Add UTF-8 BOM for Excel compatibility
return Buffer.from('\uFEFF' + csv, 'utf8');
};
// For large exports, use streaming with ZIP
const generateZipStream = async (data: ExportData): Promise<Buffer> => {
return new Promise((resolve, reject) => {
const chunks: Buffer[] = [];
const archive = archiver('zip', { zlib: { level: 9 } });
archive.on('data', chunk => chunks.push(chunk));
archive.on('end', () => resolve(Buffer.concat(chunks)));
archive.on('error', reject);
for (const [type, records] of Object.entries(data)) {
if (!records || records.length === 0) continue;
const columns = getColumns(type as ExportType);
const mapped = records.map(record => {
const row: Record<string, unknown> = {};
for (const [label, key] of Object.entries(columns)) {
row[label] = record[key as keyof typeof record] ?? '';
}
return row;
});
const csv = Papa.unparse(mapped, {
quotes: true,
delimiter: ';',
header: true,
newline: '\r\n',
});
archive.append('\uFEFF' + csv, { name: FILE_NAMES[type as ExportType] });
}
archive.finalize();
});
};
// Streaming version for very large exports
const generateZipStreamToFile = async (data: ExportData, filePath: string): Promise<void> => {
return new Promise((resolve, reject) => {
const output = fs.createWriteStream(filePath);
const archive = archiver('zip', { zlib: { level: 9 } });
output.on('close', resolve);
archive.on('error', reject);
archive.pipe(output);
for (const [type, records] of Object.entries(data)) {
if (!records || records.length === 0) continue;
const columns = getColumns(type as ExportType);
const mapped = records.map(record => {
const row: Record<string, unknown> = {};
for (const [label, key] of Object.entries(columns)) {
row[label] = record[key as keyof typeof record] ?? '';
}
return row;
});
const csv = Papa.unparse(mapped, {
quotes: true,
delimiter: ';',
header: true,
newline: '\r\n',
});
archive.append('\uFEFF' + csv, { name: FILE_NAMES[type as ExportType] });
}
archive.finalize();
});
};
const getExtension = (): string => 'zip';
const getMimeType = (): string => 'application/zip';
return { generate, generateZipStream, generateZipStreamToFile, getExtension, getMimeType };
};
export const csvExporter = createCsvExporter();
Export Flow
// src/core/ipc/handlers/export-handler.ts
import { csvExporter } from '@/lib/exporters/csv-exporter';
import { getEmployeesForExport, getAttachmentsForExport, getMediaForExport } from '@/db/queries/export-queries';
import * as fs from 'fs';
// Above this many records we stream the archive straight to disk instead
// of buffering the whole export in memory.
const LARGE_EXPORT_THRESHOLD = 1000;

/**
 * IPC handler for CSV export.
 *
 * Flow: fetch the selected datasets → route large exports to the streaming
 * path → prompt for a destination → write a single CSV (one type) or a ZIP
 * of CSVs (several types).
 *
 * Returns { success, filePath?, recordCount?, canceled?, warning?, error? }.
 */
const handleCsvExport = async (options: ExportOptions) => {
  // 1. Get data based on selected types.
  // NOTE(review): the query helpers are called without await — presumably
  // synchronous (e.g. better-sqlite3 style); confirm against export-queries.
  const data: ExportData = {};
  if (options.types.includes('employees')) {
    data.employees = getEmployeesForExport(options.dateRange);
  }
  if (options.types.includes('attachments')) {
    data.attachments = getAttachmentsForExport(options.dateRange);
  }
  if (options.types.includes('media')) {
    data.media = getMediaForExport(options.dateRange);
  }
  const totalRecords = Object.values(data).flat().length;

  // Per the error-handling spec: warn on empty exports instead of silently
  // writing a blank file.
  if (totalRecords === 0) {
    return { success: false, warning: 'Aucune donnée à exporter' };
  }

  // 2. Large exports use the streaming path to avoid memory issues.
  const isMultiType = options.types.length > 1;
  if (totalRecords > LARGE_EXPORT_THRESHOLD) {
    return handleLargeCsvExport(options, data);
  }

  // 3. Show save dialog. Multi-type exports are packaged as a ZIP.
  const ext = isMultiType ? 'zip' : 'csv';
  const { filePath, canceled } = await dialog.showSaveDialog({
    title: 'Exporter en CSV',
    defaultPath: `export-${Date.now()}.${ext}`,
    filters: [
      { name: isMultiType ? 'ZIP Archive' : 'CSV', extensions: [ext] }
    ],
  });
  if (canceled || !filePath) {
    return { success: false, canceled: true };
  }

  // 4. Generate and write. Per the spec, surface write failures
  // (permission denied, disk full) as an error message instead of an
  // unhandled rejection.
  try {
    const buffer = isMultiType
      ? await csvExporter.generateZipStream(data)
      : csvExporter.generate(data);
    await fs.promises.writeFile(filePath, buffer);
  } catch (e: unknown) {
    return { success: false, error: e instanceof Error ? e.message : String(e) };
  }
  return { success: true, filePath, recordCount: totalRecords };
};
/**
 * Streaming path for exports above LARGE_EXPORT_THRESHOLD records.
 *
 * BUG FIX: the streaming exporter (`generateZipStreamToFile`) always
 * produces a ZIP archive, but the previous code offered a `.csv`
 * extension for single-type exports — the user got a ZIP file with a
 * misleading .csv name that Excel could not open. Large exports are now
 * always saved as .zip to match what is actually written.
 */
const handleLargeCsvExport = async (options: ExportOptions, data: ExportData) => {
  const { filePath, canceled } = await dialog.showSaveDialog({
    title: 'Exporter en CSV (gros volume)',
    defaultPath: `export-${Date.now()}.zip`,
    filters: [{ name: 'ZIP Archive', extensions: ['zip'] }],
  });
  if (canceled || !filePath) {
    return { success: false, canceled: true };
  }
  // Stream directly to disk so the archive is never held in memory.
  // Per the spec, report write failures (permissions, disk full) as errors.
  try {
    await csvExporter.generateZipStreamToFile(data, filePath);
  } catch (e: unknown) {
    return { success: false, error: e instanceof Error ? e.message : String(e) };
  }
  const totalRecords = Object.values(data).flat().length;
  return { success: true, filePath, recordCount: totalRecords };
};
Encoding Notes
- UTF-8 with BOM (`\uFEFF`) is critical for Excel to open the CSV correctly, without garbled accented characters
- Use the `;` delimiter (standard in French/European locales)
- Use `\r\n` line endings for Windows compatibility
Error Handling
| Error | Handling |
| --- | --- |
| No data to export | Return warning, empty CSV with headers |
| Write permission denied | Return error with message |
| Disk full | Return error with message |
Testing Checklist
Related
Implement CSV export functionality with UTF-8 encoding (BOM for Excel compatibility). CSV is the simplest format and should be implemented first.
Requirements
Dependencies
- `papaparse` package for CSV generation
- `archiver` package for ZIP archive (for multi-type exports)

CSV Strategy
For multiple types, use ZIP archive with separate CSV files:
For single type, export directly as CSV.
CSV Exporter Implementation
Export Flow
Encoding Notes
- UTF-8 with BOM (`\uFEFF`) is critical for Excel to open the CSV correctly without garbled characters
- Use the `;` delimiter (standard in French/European locales)
- Use `\r\n` line endings for Windows compatibility

Error Handling
Testing Checklist
Related