diff --git a/apps/api/package.json b/apps/api/package.json index 32e7a2fc7..c480575a5 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -1,6 +1,6 @@ { "name": "@impler/api", - "version": "1.9.3", + "version": "1.10.0", "author": "implerhq", "license": "MIT", "private": true, diff --git a/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts b/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts index 16796c47e..c2ecd8dd0 100644 --- a/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts +++ b/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts @@ -17,27 +17,94 @@ export class AutoImportJobsSchedular { @Cron(CronExpression.EVERY_MINUTE) async handleCronSchedular() { - console.log('Cron Running'); - await this.fetchAndExecuteScheduledJobs(); + const startTime = new Date(); + const memUsageStart = process.memoryUsage(); + const cpuUsageStart = process.cpuUsage(); + + console.log('========================================'); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Cron Started at ${startTime.toISOString()}`); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log('========================================'); + + try { + await this.fetchAndExecuteScheduledJobs(); + + const endTime = new Date(); + const duration = endTime.getTime() - startTime.getTime(); + const memUsageEnd = process.memoryUsage(); + const cpuUsageEnd = process.cpuUsage(cpuUsageStart); + + console.log('========================================'); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Cron Completed at ${endTime.toISOString()}`); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Duration: ${duration}ms`); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`); + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`); + + if (duration > 5000) { + console.warn(`[AUTO-IMPORT-JOBS-SCHEDULER] ⚠️ WARNING: Cron execution took ${duration}ms (>5s threshold)`); + } + console.log('========================================'); + } catch (error) { + const endTime = new Date(); + const duration = endTime.getTime() - startTime.getTime(); + + console.error('========================================'); + console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] ❌ ERROR at ${endTime.toISOString()}`); + console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] Duration before error: ${duration}ms`); + console.error('[AUTO-IMPORT-JOBS-SCHEDULER] Error details:', error); + console.error('========================================'); + } } private async fetchAndExecuteScheduledJobs() { const now = dayjs(); const userJobs = await this.userJobRepository.find({}); + + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Total jobs found: ${userJobs.length}`); + + let jobsProcessed = 0; + let jobsSkipped = 0; + let jobsExecuted = 0; for (const userJob of userJobs) { - if (await this.shouldCroneRun({ userJob })) { - try { + const jobStartTime = Date.now(); + try { + if (await 
this.shouldCroneRun({ userJob })) { if (this.isJobDueNow(userJob.nextRun, now)) { + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job is due now - JobID: ${userJob._id}, Time: ${new Date().toISOString()}`); + const nextScheduledTime = this.calculateNextRun(userJob.cron, userJob.nextRun); - await this.scheduleUpdateNextRun(userJob._id, nextScheduledTime, dayjs(userJob.endsOn)); - + + const executeStartTime = Date.now(); await this.userJobTriggerService.execute(userJob._id); + const executeDuration = Date.now() - executeStartTime; + + jobsExecuted++; + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job executed - JobID: ${userJob._id}, Duration: ${executeDuration}ms`); + + if (executeDuration > 5000) { + console.warn(`[AUTO-IMPORT-JOBS-SCHEDULER] ⚠️ WARNING: Job execution took ${executeDuration}ms (>5s) - JobID: ${userJob._id}`); + } + } else { + jobsSkipped++; } - } catch (error) {} + } else { + jobsSkipped++; + } + + jobsProcessed++; + const jobDuration = Date.now() - jobStartTime; + if (jobDuration > 1000) { + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job processing took ${jobDuration}ms - JobID: ${userJob._id}`); + } + } catch (error) { + console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] ❌ Error processing job ${userJob._id} at ${new Date().toISOString()}`, error); } } + + console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Summary - Processed: ${jobsProcessed}, Executed: ${jobsExecuted}, Skipped: ${jobsSkipped}`); } calculateNextRun(cronExpression: string, currentNextRun: Date): dayjs.Dayjs { diff --git a/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts b/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts index b14459628..fad3e99f9 100644 --- a/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts +++ b/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts @@ -14,25 +14,82 @@ export class FailedWebhookRetry { @Cron(CronExpression.EVERY_5_MINUTES) async processWebhookRetries() { + const startTime = new Date(); + const memUsageStart = process.memoryUsage(); + const cpuUsageStart = process.cpuUsage(); + + console.log('========================================'); + console.log(`[FAILED-WEBHOOK-RETRY] Cron Started at ${startTime.toISOString()}`); + console.log(`[FAILED-WEBHOOK-RETRY] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log('========================================'); + try { const failedWebhooks: FailedWebhookRetryRequestsEntity[] = await this.failedWebhookRetryRequestsRepository.find({ nextRequestTime: { $lt: new Date() }, }); + console.log(`[FAILED-WEBHOOK-RETRY] Found ${failedWebhooks.length} failed webhooks to retry`); + if (!failedWebhooks.length) { + console.log(`[FAILED-WEBHOOK-RETRY] No webhooks to process, exiting`); return; } - await Promise.allSettled(failedWebhooks.map((wbh) => this.processWebhook(wbh))); + const processStartTime = Date.now(); + const results = await Promise.allSettled(failedWebhooks.map((wbh) => this.processWebhook(wbh))); + const processDuration = Date.now() - processStartTime; + + const successful = results.filter(r => r.status === 'fulfilled').length; + const failed = results.filter(r => r.status === 'rejected').length; + + const endTime = new Date(); + const duration = endTime.getTime() - startTime.getTime(); + const memUsageEnd = process.memoryUsage(); + const cpuUsageEnd = process.cpuUsage(cpuUsageStart); 
+ + console.log('========================================'); + console.log(`[FAILED-WEBHOOK-RETRY] Cron Completed at ${endTime.toISOString()}`); + console.log(`[FAILED-WEBHOOK-RETRY] Results - Successful: ${successful}, Failed: ${failed}, Total: ${failedWebhooks.length}`); + console.log(`[FAILED-WEBHOOK-RETRY] Processing Duration: ${processDuration}ms`); + console.log(`[FAILED-WEBHOOK-RETRY] Total Duration: ${duration}ms`); + console.log(`[FAILED-WEBHOOK-RETRY] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log(`[FAILED-WEBHOOK-RETRY] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`); + console.log(`[FAILED-WEBHOOK-RETRY] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`); + + if (duration > 5000) { + console.warn(`[FAILED-WEBHOOK-RETRY] ⚠️ WARNING: Cron execution took ${duration}ms (>5s threshold)`); + } + + if (failedWebhooks.length > 100) { + console.warn(`[FAILED-WEBHOOK-RETRY] ⚠️ WARNING: Processing large batch of ${failedWebhooks.length} webhooks`); + } + + console.log('========================================'); } catch (error) { + const endTime = new Date(); + const duration = endTime.getTime() - startTime.getTime(); + + console.error('========================================'); + console.error(`[FAILED-WEBHOOK-RETRY] ❌ ERROR at ${endTime.toISOString()}`); + console.error(`[FAILED-WEBHOOK-RETRY] Duration before error: ${duration}ms`); + console.error('[FAILED-WEBHOOK-RETRY] Error details:', error); + console.error('========================================'); throw error; } } private async processWebhook(webhook: FailedWebhookRetryRequestsEntity) { + const webhookStartTime = Date.now(); try { + console.log(`[FAILED-WEBHOOK-RETRY] Processing webhook - ID: ${webhook._id}, Time: ${new Date().toISOString()}`); + this.queueService.publishToQueue(QueuesEnum.SEND_FAILED_WEBHOOK_DATA, webhook._id as string); + + const webhookDuration = Date.now() - webhookStartTime; + console.log(`[FAILED-WEBHOOK-RETRY] Webhook queued - ID: ${webhook._id}, Duration: ${webhookDuration}ms`); } catch (error) { + const webhookDuration = Date.now() - webhookStartTime; + console.error(`[FAILED-WEBHOOK-RETRY] ❌ Error processing webhook - ID: ${webhook._id}, Duration: ${webhookDuration}ms, Time: ${new Date().toISOString()}`, error); throw error; } } diff --git a/apps/api/src/app/mapping/usecases/rename-file-headings/rename-file-headings.usecase.ts b/apps/api/src/app/mapping/usecases/rename-file-headings/rename-file-headings.usecase.ts index 47f8a967e..83b3568a1 100644 --- a/apps/api/src/app/mapping/usecases/rename-file-headings/rename-file-headings.usecase.ts +++ b/apps/api/src/app/mapping/usecases/rename-file-headings/rename-file-headings.usecase.ts @@ -14,10 +14,7 @@ export class ReanameFileHeadings { const newHeadings = [...uploadInfo.headings]; templateColumnItems.forEach((mapping) => { - if (!mapping.columnHeading) { - const headingIndex = newHeadings.findIndex((heading) => heading === mapping.key); - if (headingIndex > -1) newHeadings[headingIndex] = '_'; - } else { + if (mapping.columnHeading) { const columnHeadingIndex = newHeadings.findIndex((heading) => heading === mapping.columnHeading); const keyHeadingIndex = newHeadings.findIndex((keyHeading) => keyHeading === mapping.key); if (keyHeadingIndex > -1 && columnHeadingIndex > 
-1) { diff --git a/apps/api/src/app/review/usecases/do-review/base-review.usecase.ts b/apps/api/src/app/review/usecases/do-review/base-review.usecase.ts index 21843e7aa..d4f58697e 100644 --- a/apps/api/src/app/review/usecases/do-review/base-review.usecase.ts +++ b/apps/api/src/app/review/usecases/do-review/base-review.usecase.ts @@ -454,7 +454,7 @@ export class BaseReview { Papa.parse(csvFileStream, { dynamicTyping: false, skipEmptyLines: 'greedy', - step: (results: Papa.ParseStepResult) => { + step: (results: Papa.ParseStepResult, parser: any) => { totalRecords++; const record = results.data; @@ -477,7 +477,13 @@ export class BaseReview { } else { invalidRecords++; } - dataStream.write(validationResultItem); + const canContinue = dataStream.write(validationResultItem); + if (!canContinue) { + parser.pause(); + dataStream.once('drain', () => { + parser.resume(); + }); + } } }, complete: async () => { diff --git a/apps/api/src/app/shared/services/queue.service.ts b/apps/api/src/app/shared/services/queue.service.ts index 36a9552d3..5a3bcfb54 100644 --- a/apps/api/src/app/shared/services/queue.service.ts +++ b/apps/api/src/app/shared/services/queue.service.ts @@ -17,8 +17,8 @@ export class QueueService { this.connection.on('error', (error: any) => { Logger.error('QueueService RabbitMQ::Error!', error); }); - this.connection.on('disconnect', () => { - Logger.log('QueueService RabbitMQ::Disconnected!'); + this.connection.on('disconnect', ({ error }: { error: any }) => { + Logger.log('QueueService RabbitMQ::Disconnected!', error); this.isQueueConnected = false; }); diff --git a/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts b/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts index 933c29b7d..c46bcf52d 100644 --- a/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts +++ b/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts @@ -16,23 +16,59 @@ export class UploadCleanupSchedulerService { @Cron(CRON_SCHEDULE.UPLOAD_CLEANUP_DEFAULT_CRON_TIME) async handleCleanupCronSchedular(cleanupDays: number = CRON_SCHEDULE.UPLOAD_CLEANUP_DAYS) { + const startTime = new Date(); + const memUsageStart = process.memoryUsage(); + const cpuUsageStart = process.cpuUsage(); + + console.log('========================================'); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cron Started at ${startTime.toISOString()}`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cleanup days: ${cleanupDays}`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log('========================================'); + const cleanupDaysAgo = dayjs().subtract(cleanupDays, 'day').toDate(); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Finding uploads older than: ${cleanupDaysAgo.toISOString()}`); const uploads = await Upload.find({ uploadedDate: { $lt: cleanupDaysAgo }, _uploadedFileId: { $exists: true, $ne: '' }, }); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Found ${uploads.length} uploads to clean up`); + if (uploads.length === 0) { + console.log(`[UPLOAD-CLEANUP-SCHEDULER] No uploads to clean up, exiting`); return; } + + if (uploads.length > 100) { + console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Processing large batch of ${uploads.length} uploads - potential CPU intensive operation`); + } + + let uploadsProcessed = 0; + let uploadsSucceeded 
= 0; + let uploadsFailed = 0; + let totalFilesDeleted = 0; + let totalCollectionsDropped = 0; for (const upload of uploads) { + const uploadStartTime = Date.now(); try { + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Processing upload - ID: ${upload.id}, UploadedFileID: ${upload._uploadedFileId}`); + const files = await this.fileRepository.find({ _id: upload._uploadedFileId }); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Found ${files.length} files for upload ${upload.id}`); + + const storageDeleteStart = Date.now(); await this.storageService.deleteFolder(upload.id); + const storageDeleteDuration = Date.now() - storageDeleteStart; + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Storage folder deleted - Upload: ${upload.id}, Duration: ${storageDeleteDuration}ms`); + + if (storageDeleteDuration > 2000) { + console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Storage deletion took ${storageDeleteDuration}ms (>2s) - Upload: ${upload.id}`); + } - await Promise.allSettled( + const fileResults = await Promise.allSettled( files.map(async (file) => { try { await Upload.updateOne({ _uploadedFileId: file._id }, { $set: { _uploadedFileId: '' } }); @@ -40,7 +76,11 @@ export class UploadCleanupSchedulerService { // Delete file from storage and db try { await this.fileRepository.delete({ _id: file._id }); - } catch (error) {} + totalFilesDeleted++; + console.log(`[UPLOAD-CLEANUP-SCHEDULER] File deleted - FileID: ${file._id}`); + } catch (error) { + console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error deleting file - FileID: ${file._id}`, error); + } const collectionName = `${upload._id}-records`; try { @@ -51,12 +91,59 @@ if (collections.length > 0) { const collection = this.dalService.connection.collection(collectionName); await collection.drop(); + totalCollectionsDropped++; + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Collection dropped - Name: ${collectionName}`); } - } catch (error) {} - } catch (error) {} + } catch (error) { + console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error dropping collection - Name: ${collectionName}`, error); + } + } catch (error) { + console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error processing file - FileID: ${file._id}`, error); + throw error; + } }) ); - } catch (error) {} + + const fileSuccesses = fileResults.filter(r => r.status === 'fulfilled').length; + const fileFailures = fileResults.filter(r => r.status === 'rejected').length; + + const uploadDuration = Date.now() - uploadStartTime; + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Upload processed - ID: ${upload.id}, Files: ${files.length}, Success: ${fileSuccesses}, Failed: ${fileFailures}, Duration: ${uploadDuration}ms`); + + if (uploadDuration > 5000) { + console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Upload processing took ${uploadDuration}ms (>5s) - Upload: ${upload.id}`); + } + + uploadsSucceeded++; + } catch (error) { + const uploadDuration = Date.now() - uploadStartTime; + console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error processing upload - ID: ${upload.id}, Duration: ${uploadDuration}ms`, error); + uploadsFailed++; + } finally { + uploadsProcessed++; + } + } + + const endTime = new Date(); + const duration = endTime.getTime() - startTime.getTime(); + const memUsageEnd = process.memoryUsage(); + const cpuUsageEnd = process.cpuUsage(cpuUsageStart); + + console.log('========================================'); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cron Completed at ${endTime.toISOString()}`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Summary - Total: ${uploadsProcessed}, Success: 
${uploadsSucceeded}, Failed: ${uploadsFailed}`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Files Deleted: ${totalFilesDeleted}, Collections Dropped: ${totalCollectionsDropped}`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Total Duration: ${duration}ms`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`); + console.log(`[UPLOAD-CLEANUP-SCHEDULER] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`); + + if (duration > 30000) { + console.error(`[UPLOAD-CLEANUP-SCHEDULER] 🚨 CRITICAL: Cron execution took ${duration}ms (>30s threshold) - HIGH CPU RISK!`); + } else if (duration > 10000) { + console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Cron execution took ${duration}ms (>10s threshold)`); } + + console.log('========================================'); } } diff --git a/apps/queue-manager/package.json b/apps/queue-manager/package.json index 27766fdb7..f2baa8f17 100644 --- a/apps/queue-manager/package.json +++ b/apps/queue-manager/package.json @@ -1,6 +1,6 @@ { "name": "@impler/queue-manager", - "version": "1.9.3", + "version": "1.10.0", "author": "implerhq", "license": "MIT", "private": true, diff --git a/apps/queue-manager/src/consumers/end-import.consumer.ts b/apps/queue-manager/src/consumers/end-import.consumer.ts index 2c9f8b4a3..79cbe778d 100644 --- a/apps/queue-manager/src/consumers/end-import.consumer.ts +++ b/apps/queue-manager/src/consumers/end-import.consumer.ts @@ -1,3 +1,5 @@ +import * as Sentry from '@sentry/node'; +import { Readable } from 'stream'; import { DalService, FileRepository, UploadRepository } from '@impler/dal'; import { QueuesEnum, EndImportData, FileMimeTypesEnum, DestinationsEnum } from '@impler/shared'; import { FileNameService, PaymentAPIService, StorageService } from '@impler/services'; @@ -17,59 +19,69 @@ export class EndImportConsumer extends BaseConsumer { async message(message: { content: string }) { const data = JSON.parse(message.content) as EndImportData; - await this.convertRecordsToJsonFile(data.uploadId, data.uploadedFileId); - const userEmail = await this.uploadRepository.getUserEmailFromUploadId(data.uploadId); - const dataProcessingAllowed = await this.paymentAPIService.checkEvent({ - email: userEmail, - }); + try { + await this.convertRecordsToJsonFile(data.uploadId, data.uploadedFileId); + const userEmail = await this.uploadRepository.getUserEmailFromUploadId(data.uploadId); - if (dataProcessingAllowed) { - if (data.destination === DestinationsEnum.WEBHOOK) { - publishToQueue(QueuesEnum.SEND_WEBHOOK_DATA, { - uploadId: data.uploadId, - }); - } else if (data.destination === DestinationsEnum.BUBBLEIO) { - publishToQueue(QueuesEnum.SEND_BUBBLE_DATA, { - uploadId: data.uploadId, - }); + const dataProcessingAllowed = await this.paymentAPIService.checkEvent({ + email: userEmail, + }); + + if (dataProcessingAllowed) { + if (data.destination === DestinationsEnum.WEBHOOK) { + publishToQueue(QueuesEnum.SEND_WEBHOOK_DATA, { + uploadId: data.uploadId, + }); + } else if (data.destination === DestinationsEnum.BUBBLEIO) { + publishToQueue(QueuesEnum.SEND_BUBBLE_DATA, { + uploadId: data.uploadId, + }); + } } + } catch (error) { + Sentry.captureException(error); } } 
private async convertRecordsToJsonFile(uploadId: string, uploadedFileId?: string): Promise<void> { - const importData = await this.dalService.getAllRecords(uploadId); - const allJsonDataFilePath = this.fileNameService.getAllJsonDataFilePath(uploadId); - await this.storageService.uploadFile(allJsonDataFilePath, JSON.stringify(importData), FileMimeTypesEnum.JSON); - await this.dalService.dropRecordCollection(uploadId); + try { + const jsonStream = this.dalService.getRecordsStream(uploadId); - if (!uploadedFileId) { - const csvData = await this.convertImportDataToCsv(importData); - const importedFileName = this.fileNameService.getImportedFileName(uploadId); - const filePath = this.fileNameService.getImportedFilePath(uploadId); - const uploadedFileEntry = await this.fileRepository.create({ - mimeType: FileMimeTypesEnum.CSV, - name: importedFileName, - originalName: importedFileName, - path: filePath, - }); - await this.storageService.uploadFile(filePath, csvData, FileMimeTypesEnum.CSV); - await this.uploadRepository.findOneAndUpdate( - { - _id: uploadId, - }, - { - $set: { - _uploadedFileId: uploadedFileEntry._id, - originalFileName: importedFileName, - originalFileType: FileMimeTypesEnum.CSV, + const allJsonDataFilePath = this.fileNameService.getAllJsonDataFilePath(uploadId); + await this.storageService.uploadFile(allJsonDataFilePath, jsonStream, FileMimeTypesEnum.JSON); + await this.dalService.dropRecordCollection(uploadId); + + if (!uploadedFileId) { + const csvData = await this.convertImportDataToCsv(jsonStream); + const importedFileName = this.fileNameService.getImportedFileName(uploadId); + const filePath = this.fileNameService.getImportedFilePath(uploadId); + const uploadedFileEntry = await this.fileRepository.create({ + mimeType: FileMimeTypesEnum.CSV, + name: importedFileName, + originalName: importedFileName, + path: filePath, + }); + await this.storageService.uploadFile(filePath, csvData, FileMimeTypesEnum.CSV); + await this.uploadRepository.findOneAndUpdate( + { + _id: uploadId, }, - } - ); + { + $set: { + _uploadedFileId: uploadedFileEntry._id, + originalFileName: importedFileName, + originalFileType: FileMimeTypesEnum.CSV, + }, + } + ); + } + } catch (error) { + Sentry.captureException(error); } } - async convertImportDataToCsv(importData) { + async convertImportDataToCsv(importData: Readable) { const recordsData = importData.map((item) => item.record); const csv = Papa.unparse(recordsData, { diff --git a/apps/queue-manager/src/consumers/send-webhook-data.consumer.ts b/apps/queue-manager/src/consumers/send-webhook-data.consumer.ts index afab87262..5549847f6 100644 --- a/apps/queue-manager/src/consumers/send-webhook-data.consumer.ts +++ b/apps/queue-manager/src/consumers/send-webhook-data.consumer.ts @@ -48,82 +48,88 @@ export class SendWebhookDataConsumer extends BaseConsumer { const uploadId = data.uploadId; const isRetry = data.isRetry || false; - const cachedData = data.cache || (await this.getInitialCachedData(uploadId)); + try { + const cachedData = data.cache || (await this.getInitialCachedData(uploadId)); - if (cachedData && cachedData.callbackUrl) { - // Get valid data information - let allDataJson: null | any[] = null; - if (cachedData.allDataFilePath) { - const allDataContent = await this.storageService.getFileContent( - cachedData.allDataFilePath, - FileEncodingsEnum.JSON - ); - allDataJson = JSON.parse(allDataContent); - } - if (!(Array.isArray(allDataJson) && allDataJson.length > 0)) return; - const { sendData, page } = this.buildSendData({ - uploadId, - data: allDataJson, 
- extra: cachedData.extra, - template: cachedData.name, - fileName: cachedData.fileName, - chunkSize: cachedData.chunkSize, - defaultValues: cachedData.defaultValues, - page: cachedData.page || DEFAULT_PAGE, - recordFormat: cachedData.recordFormat, - chunkFormat: cachedData.chunkFormat, - totalRecords: allDataJson.length, - imageHeadings: cachedData.imageHeadings, - multiSelectHeadings: cachedData.multiSelectHeadings, - }); + if (cachedData && cachedData.callbackUrl) { + // Get valid data information + let allDataJson: null | any[] = null; + if (cachedData.allDataFilePath) { + const allDataContent = await this.storageService.getFileContent( + cachedData.allDataFilePath, + FileEncodingsEnum.JSON + ); + allDataJson = JSON.parse(allDataContent); + } + if (!(Array.isArray(allDataJson) && allDataJson.length > 0)) return; - const headers = - cachedData.authHeaderName && cachedData.authHeaderValue - ? { [cachedData.authHeaderName]: cachedData.authHeaderValue } - : null; + const totalPages = this.getTotalPages(allDataJson.length, cachedData.chunkSize); + let currentPage = cachedData.page || DEFAULT_PAGE; + const startPage = currentPage; + const PAGES_TO_PROCESS = 50; // Process 50 pages per message to speed up but allow heartbeats - const allData = { - data: sendData, - uploadId, - page, - method: 'POST', - url: cachedData.callbackUrl, - headers, - isRetry, - }; + while (currentPage <= totalPages && currentPage < startPage + PAGES_TO_PROCESS) { + const { sendData, page } = this.buildSendData({ + uploadId, + data: allDataJson, + extra: cachedData.extra, + template: cachedData.name, + fileName: cachedData.fileName, + chunkSize: cachedData.chunkSize, + defaultValues: cachedData.defaultValues, + page: currentPage, + recordFormat: cachedData.recordFormat, + chunkFormat: cachedData.chunkFormat, + totalRecords: allDataJson.length, + imageHeadings: cachedData.imageHeadings, + multiSelectHeadings: cachedData.multiSelectHeadings, + }); - const response = await this.makeApiCall(allData); + const headers = + cachedData.authHeaderName && cachedData.authHeaderValue + ? 
{ [cachedData.authHeaderName]: cachedData.authHeaderValue } + : null; - await this.makeResponseEntry({ - data: response, - projectId: cachedData.projectId, - importName: cachedData.name, - url: cachedData.callbackUrl, - retryInterval: cachedData.retryInterval, - retryCount: cachedData.retryCount, - allData, - }); + const allData = { + data: sendData, + uploadId, + page, + method: 'POST', + url: cachedData.callbackUrl, + headers, + isRetry, + }; - const nextPageNumber = this.getNextPageNumber({ - totalRecords: allDataJson.length, - currentPage: page, - chunkSize: cachedData.chunkSize, - }); + const response = await this.makeApiCall(allData); - if (nextPageNumber) { - // Make next call - publishToQueue(QueuesEnum.SEND_WEBHOOK_DATA, { - uploadId, - cache: { - ...cachedData, - page: nextPageNumber, - }, - }); - } else { - // Processing is done - this.finalizeUpload(uploadId); + await this.makeResponseEntry({ + data: response, + projectId: cachedData.projectId, + importName: cachedData.name, + url: cachedData.callbackUrl, + retryInterval: cachedData.retryInterval, + retryCount: cachedData.retryCount, + allData, + }); + + currentPage += 1; + } + + if (currentPage <= totalPages) { + // Queue next batch + publishToQueue(QueuesEnum.SEND_WEBHOOK_DATA, { + uploadId, + cache: { + ...cachedData, + page: currentPage, + }, + }); + } else { + // Processing is done + this.finalizeUpload(uploadId); + } } - } + } catch (error) {} } private buildSendData({ diff --git a/apps/web/assets/images/companies/nirvana.svg b/apps/web/assets/images/companies/nirvana.svg deleted file mode 100644 index 6d2bd08c8..000000000 --- a/apps/web/assets/images/companies/nirvana.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/apps/web/components/home/PlanDetails/ActiveSubscriptionDetails.tsx b/apps/web/components/home/PlanDetails/ActiveSubscriptionDetails.tsx index e5a5338a3..629387d74 100644 --- a/apps/web/components/home/PlanDetails/ActiveSubscriptionDetails.tsx +++ b/apps/web/components/home/PlanDetails/ActiveSubscriptionDetails.tsx @@ -2,7 +2,7 @@ import React from 'react'; import Link from 'next/link'; import dayjs from 'dayjs'; import { useCustomerPortal } from 'subos-frontend'; -import { Stack, Group, ActionIcon, Menu, Alert } from '@mantine/core'; +import { Stack, Group, ActionIcon, Menu, Alert, Button } from '@mantine/core'; import { ActionsEnum, colors, DATE_FORMATS, PLANCODEENUM, SubjectsEnum } from '@config'; import { ISubscriptionData, numberFormatter } from '@impler/shared'; @@ -51,49 +51,52 @@ export function ActiveSubscriptionDetails({ - - - - - - + + + + + + + + + + - - - } - style={{ color: colors.StrokeLight }} - > - View All Transactions - - - - - openCustomerPortal(profileInfo!.email!)} - icon={} - style={{ color: colors.StrokeLight }} - > - Change Card - - - - - {!activePlanDetails?.plan?.canceledOn && activePlanDetails?.plan?.code !== PLANCODEENUM.STARTER && ( + + } - onClick={openCancelPlanModal} - style={{ color: colors.danger }} + component={Link} + href="/transactions" + icon={} + style={{ color: colors.StrokeLight }} > - Cancel Subscription + View All Transactions - )} - - - + + + + {!activePlanDetails?.plan?.canceledOn && activePlanDetails?.plan?.code !== PLANCODEENUM.STARTER && ( + } + onClick={openCancelPlanModal} + style={{ color: colors.danger }} + > + Cancel Subscription + + )} + + + + diff --git a/apps/web/config/constants.config.ts b/apps/web/config/constants.config.ts index 9b944f385..b6b5622ca 100644 --- a/apps/web/config/constants.config.ts +++ 
b/apps/web/config/constants.config.ts @@ -821,7 +821,6 @@ export const companyLogos = [ { id: 'aklamio', src: AklamioLogo, alt: 'Aklamio' }, { id: 'artha', src: ArthaLogo, alt: 'Artha' }, { id: 'nasscom', src: NasscomLogo, alt: 'Nasscom' }, - { id: 'nirvana', src: NirvanaLogo, alt: 'Nirvana' }, { id: 'omniva', src: OmnivaLogo, alt: 'Omniva' }, { id: 'orbit', src: OrbitLogo, alt: 'Orbit' }, { id: 'ubico', src: UbicoLogo, alt: 'Ubico' }, diff --git a/apps/web/layouts/OnboardLayout/LeftSideContent.tsx b/apps/web/layouts/OnboardLayout/LeftSideContent.tsx index 51cea2d95..dd99a8b8c 100644 --- a/apps/web/layouts/OnboardLayout/LeftSideContent.tsx +++ b/apps/web/layouts/OnboardLayout/LeftSideContent.tsx @@ -114,18 +114,18 @@ export function LeftSideContent() { /> - {/* First row: 6 logos */} + {/* First row: 4 logos */} - {companyLogos.slice(0, 6).map((company: any) => ( + {companyLogos.slice(0, 4).map((company: any) => ( {company.alt} ))} - {/* Second row: 2 logos (centered) */} + {/* Second row: 3 logos (centered) */} - {companyLogos.slice(6, 8).map((company: any) => ( + {companyLogos.slice(4, 7).map((company: any) => ( {company.alt} diff --git a/apps/web/package.json b/apps/web/package.json index a2d3ba572..1839e50fe 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -1,6 +1,6 @@ { "name": "@impler/web", - "version": "1.9.3", + "version": "1.10.0", "author": "implerhq", "license": "MIT", "private": true, diff --git a/apps/web/pages/index.tsx b/apps/web/pages/index.tsx index d2b9940fb..fdcfd260c 100644 --- a/apps/web/pages/index.tsx +++ b/apps/web/pages/index.tsx @@ -7,6 +7,7 @@ import { AppLayout } from '@layouts/AppLayout'; import { PlanDetails } from '@components/home/PlanDetails'; import { PlanPricingTable } from '@components/UpgradePlan/Plans/PlansPricingTable'; import { useAppState } from 'store/app.context'; +// Initializing public runtime configuration const { publicRuntimeConfig } = getConfig(); export default function Home() { diff --git a/apps/widget/package.json b/apps/widget/package.json index 0393a27e0..7b7116b2e 100644 --- a/apps/widget/package.json +++ b/apps/widget/package.json @@ -1,6 +1,6 @@ { "name": "@impler/widget", - "version": "1.9.3", + "version": "1.10.0", "author": "implerhq", "license": "MIT", "private": true, diff --git a/apps/widget/src/hooks/Phase1/usePhase1.ts b/apps/widget/src/hooks/Phase1/usePhase1.ts index 3c1634560..c2645d42f 100644 --- a/apps/widget/src/hooks/Phase1/usePhase1.ts +++ b/apps/widget/src/hooks/Phase1/usePhase1.ts @@ -111,11 +111,18 @@ export function usePhase1({ goNext, texts, onManuallyEnterData }: IUsePhase1Prop { onSuccess(uploadData, uploadValues) { ParentWindow.UploadStarted({ templateId: uploadData._templateId, uploadId: uploadData._id }); + ParentWindow.UploadSuccess({ uploadId: uploadData._id, rowCount: uploadData.totalRecords }); + ParentWindow.UploadStatusSuccess({ uploadId: uploadData._id, rowCount: uploadData.totalRecords }); setUploadInfo(uploadData); if (uploadValues.file) goNext(); else onManuallyEnterData(); }, onError(error: IErrorObject) { + ParentWindow.UploadError({ + errorCode: error.statusCode || 500, + errorMessage: error.message || 'Unknown Error', + }); + ParentWindow.UploadStatusError(error); resetField('file'); setError('file', { type: 'file', @@ -129,7 +136,6 @@ export function usePhase1({ goNext, texts, onManuallyEnterData }: IUsePhase1Prop string[], IErrorObject, { file: File } - // eslint-disable-next-line prettier/prettier >(['getExcelSheetNames'], (excelSheetFile) => 
api.getExcelSheetNames(excelSheetFile), { onSuccess(sheetNames) { if (sheetNames.length <= 1) { diff --git a/apps/widget/src/util/parent-window.ts b/apps/widget/src/util/parent-window.ts index d6e63ad98..8321a5b10 100644 --- a/apps/widget/src/util/parent-window.ts +++ b/apps/widget/src/util/parent-window.ts @@ -28,3 +28,45 @@ export function DataImported(value: Record<string, any>[]) { export function ImportJobCreated(value: IUserJob) { window.parent.postMessage({ type: EventTypesEnum.IMPORT_JOB_CREATED, value }, '*'); } +export function UploadSuccess(value: { uploadId: string; rowCount: number }) { + window.parent.postMessage({ type: EventTypesEnum.UPLOAD_SUCCESS, value }, '*'); +} +export function UploadError(value: { errorCode: number; errorMessage: string }) { + window.parent.postMessage({ type: EventTypesEnum.UPLOAD_ERROR, value }, '*'); +} + +export function UploadStatusSuccess(value: { uploadId: string; rowCount: number }) { + window.parent.postMessage( + { + source: 'impler-embed', + type: 'UPLOAD_STATUS', + payload: { + status: 'SUCCESS', + meta: { + uploadId: value.uploadId, + rowCount: value.rowCount, + timestamp: new Date().toISOString(), + }, + }, + }, + '*' + ); +} + +export function UploadStatusError(value: { message?: string; error?: string; statusCode?: number }) { + window.parent.postMessage( + { + source: 'impler-embed', + type: 'UPLOAD_STATUS', + payload: { + status: 'ERROR', + meta: { + errorCode: value.statusCode || 500, + errorMessage: value.message || value.error || 'Unknown Network Error', + timestamp: new Date().toISOString(), + }, + }, + }, + '*' + ); +} diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 19afe8a29..f08889158 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -18,7 +18,7 @@ services: api: privileged: true - image: 'ghcr.io/implerhq/impler/api:1.9.3' + image: 'ghcr.io/implerhq/impler/api:1.10.0' depends_on: - mongodb - rabbitmq @@ -56,7 +56,7 @@ services: - impler queue-manager: - image: 'ghcr.io/implerhq/impler/queue-manager:1.9.3' + image: 'ghcr.io/implerhq/impler/queue-manager:1.10.0' depends_on: - api - rabbitmq @@ -84,7 +84,7 @@ services: - impler widget: - image: 'ghcr.io/implerhq/impler/widget:1.9.3' + image: 'ghcr.io/implerhq/impler/widget:1.10.0' depends_on: - api container_name: widget @@ -102,7 +102,7 @@ services: embed: depends_on: - widget - image: 'ghcr.io/implerhq/impler/embed:1.9.3' + image: 'ghcr.io/implerhq/impler/embed:1.10.0' container_name: embed environment: WIDGET_URL: ${WIDGET_BASE_URL} @@ -114,7 +114,7 @@ services: web: depends_on: - api - image: 'ghcr.io/implerhq/impler/web:1.9.3' + image: 'ghcr.io/implerhq/impler/web:1.10.0' container_name: web environment: NEXT_PUBLIC_API_BASE_URL: ${API_ROOT_URL} diff --git a/lerna.json b/lerna.json index 83d49f111..81d87b766 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "npmClient": "pnpm", "useNx": true, "packages": ["apps/*", "libs/*", "packages/*"], - "version": "1.9.3" + "version": "1.10.0" } diff --git a/libs/dal/package.json b/libs/dal/package.json index b8df74615..19b9dd5f2 100644 --- a/libs/dal/package.json +++ b/libs/dal/package.json @@ -1,6 +1,6 @@ { "name": "@impler/dal", - "version": "1.9.3", + "version": "1.10.0", "author": "implerhq", "license": "MIT", "main": "dist/index.js", diff --git a/libs/dal/src/dal.service.ts b/libs/dal/src/dal.service.ts index 811716282..d52f12f2d 100644 --- a/libs/dal/src/dal.service.ts +++ b/libs/dal/src/dal.service.ts @@ -1,4 +1,5 @@ import { Connection, ConnectOptions } from 'mongoose'; +import { 
Readable } from 'stream'; import * as mongoose from 'mongoose'; import { RecordEntity, RecordSchema } from './repositories/record'; @@ -134,6 +135,55 @@ export class DalService { return model.find({}, 'index isValid errors warnings record'); } + + getRecordsStream(_uploadId: string) { + const model = this.getRecordCollection(_uploadId); + if (!model) throw new Error('Record collection not found'); + + const cursor = model.find({}, 'index isValid errors warnings record').cursor(); + + const jsonStream = new Readable({ + read() { + // controlled by cursor events + }, + }); + + let isFirst = true; + + cursor.on('data', (document) => { + let chunk = JSON.stringify(document); + if (isFirst) { + chunk = '[' + chunk; + isFirst = false; + } else { + chunk = ',' + chunk; + } + const canContinue = jsonStream.push(chunk); + if (!canContinue) { + cursor.pause(); + } + }); + + cursor.on('end', () => { + if (isFirst) { + jsonStream.push('[]'); + } else { + jsonStream.push(']'); + } + jsonStream.push(null); + }); + + cursor.on('error', (err) => { + jsonStream.emit('error', err); + }); + + jsonStream._read = () => { + cursor.resume(); + }; + + return jsonStream; + } + getFieldData(_uploadId: string, fields: string[]) { const model = this.getRecordCollection(_uploadId); if (!model) return; diff --git a/libs/embed/package.json b/libs/embed/package.json index aa3be5b45..4efe75680 100644 --- a/libs/embed/package.json +++ b/libs/embed/package.json @@ -1,6 +1,6 @@ { "name": "@impler/embed", - "version": "1.9.3", + "version": "1.10.0", "private": true, "license": "MIT", "author": "implerhq", diff --git a/libs/embed/src/shared/eventTypes.ts b/libs/embed/src/shared/eventTypes.ts index 06fa1ae17..eb07e2732 100644 --- a/libs/embed/src/shared/eventTypes.ts +++ b/libs/embed/src/shared/eventTypes.ts @@ -7,6 +7,10 @@ export enum EventTypesEnum { UPLOAD_STARTED = 'UPLOAD_STARTED', UPLOAD_TERMINATED = 'UPLOAD_TERMINATED', UPLOAD_COMPLETED = 'UPLOAD_COMPLETED', + UPLOAD_SUCCESS = 'UPLOAD_SUCCESS', + UPLOAD_ERROR = 'UPLOAD_ERROR', + DATA_IMPORTED = 'DATA_IMPORTED', + IMPORT_JOB_CREATED = 'IMPORT_JOB_CREATED', } export enum WidgetEventTypesEnum { diff --git a/libs/services/package.json b/libs/services/package.json index c7db8cacf..697c5aed7 100644 --- a/libs/services/package.json +++ b/libs/services/package.json @@ -1,6 +1,6 @@ { "name": "@impler/services", - "version": "1.9.3", + "version": "1.10.0", "description": "Reusable services to shared between backend api and queue-manager", "license": "MIT", "author": "implerhq", diff --git a/libs/shared/package.json b/libs/shared/package.json index 8f7b2d832..3216ab429 100644 --- a/libs/shared/package.json +++ b/libs/shared/package.json @@ -1,6 +1,6 @@ { "name": "@impler/shared", - "version": "1.9.3", + "version": "1.10.0", "description": "Reusable types and classes to shared between apps and libraries", "license": "MIT", "author": "implerhq", diff --git a/libs/shared/src/types/widget/widget.types.ts b/libs/shared/src/types/widget/widget.types.ts index 91a0d531f..aa8fa6241 100644 --- a/libs/shared/src/types/widget/widget.types.ts +++ b/libs/shared/src/types/widget/widget.types.ts @@ -75,6 +75,8 @@ export enum EventTypesEnum { UPLOAD_STARTED = 'UPLOAD_STARTED', UPLOAD_TERMINATED = 'UPLOAD_TERMINATED', UPLOAD_COMPLETED = 'UPLOAD_COMPLETED', + UPLOAD_SUCCESS = 'UPLOAD_SUCCESS', + UPLOAD_ERROR = 'UPLOAD_ERROR', DATA_IMPORTED = 'DATA_IMPORTED', IMPORT_JOB_CREATED = 'IMPORT_JOB_CREATED', } diff --git a/package.json b/package.json index b06d2c9f0..1d8e59fdd 100644 --- a/package.json +++ 
b/package.json @@ -1,6 +1,6 @@ { "name": "impler.io", - "version": "1.9.3", + "version": "1.9.4", "description": "Open source infrastructure to import data easily", "packageManager": "pnpm@8.9.0", "private": true, diff --git a/packages/angular/package.json b/packages/angular/package.json index e84b25225..0b457c667 100644 --- a/packages/angular/package.json +++ b/packages/angular/package.json @@ -1,6 +1,6 @@ { "name": "@impler/angular", - "version": "1.9.3", + "version": "1.10.0", "description": "Angular library to show CSV Excel Importer in angular applications", "license": "MIT", "author": "implerhq", diff --git a/packages/client/package.json b/packages/client/package.json index e0105fc69..ca9ed2e5a 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -1,6 +1,6 @@ { "name": "@impler/client", - "version": "1.9.3", + "version": "1.10.0", "description": "API client to be used in end user environments", "license": "MIT", "author": "implerhq", diff --git a/packages/client/src/types.ts b/packages/client/src/types.ts index 262284541..600bcae56 100644 --- a/packages/client/src/types.ts +++ b/packages/client/src/types.ts @@ -31,6 +31,8 @@ export const EventTypes = { UPLOAD_STARTED: 'UPLOAD_STARTED', UPLOAD_TERMINATED: 'UPLOAD_TERMINATED', UPLOAD_COMPLETED: 'UPLOAD_COMPLETED', + UPLOAD_SUCCESS: 'UPLOAD_SUCCESS', + UPLOAD_ERROR: 'UPLOAD_ERROR', DATA_IMPORTED: 'DATA_IMPORTED', IMPORT_JOB_CREATED: 'IMPORT_JOB_CREATED', } as const; @@ -135,6 +137,8 @@ export type UploadTemplateData = { templateId: string; }; export type UploadData = { uploadId: string }; +export type UploadSuccessData = { uploadId: string; rowCount: number }; +export type UploadErrorData = { errorCode: number; errorMessage: string }; export type EventCalls = | { @@ -157,6 +161,14 @@ export type EventCalls = type: typeof EventTypes.IMPORT_JOB_CREATED; value: IUserJob; } + | { + type: typeof EventTypes.UPLOAD_SUCCESS; + value: UploadSuccessData; + } + | { + type: typeof EventTypes.UPLOAD_ERROR; + value: UploadErrorData; + } | { type: typeof EventTypes.CLOSE_WIDGET; } @@ -236,7 +248,10 @@ export interface IUseImplerProps { onUploadStart?: (value: UploadTemplateData) => void; onUploadTerminate?: (value: UploadData) => void; onUploadComplete?: (value: IUpload) => void; + onUploadSuccess?: (value: UploadSuccessData) => void; onDataImported?: (importedData: Record<string, any>[]) => void; onWidgetClose?: () => void; onImportJobCreated?: (jobInfo: IUserJob) => void; + onWidgetReady?: () => void; + onUploadError?: (error: UploadErrorData) => void; } diff --git a/packages/react/package.json b/packages/react/package.json index 06b3638c1..505637084 100644 --- a/packages/react/package.json +++ b/packages/react/package.json @@ -1,6 +1,6 @@ { "name": "@impler/react", - "version": "1.9.3", + "version": "1.10.0", "description": "React library to show CSV Excel Importer in react applications", "license": "MIT", "author": "implerhq", diff --git a/packages/react/src/hooks/useImpler.ts b/packages/react/src/hooks/useImpler.ts index bbd0797b8..945371594 100644 --- a/packages/react/src/hooks/useImpler.ts +++ b/packages/react/src/hooks/useImpler.ts @@ -1,4 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; +import { useCallback, useEffect, useRef, useState } from 'react'; import { isObject, EventTypes, logError, EventCalls, IShowWidgetProps, IUseImplerProps } from '@impler/client'; export function useImpler({ @@ -19,13 +19,29 @@ export function useImpler({ onDataImported, onUploadTerminate, onImportJobCreated, + onWidgetReady, + 
onUploadSuccess, + onUploadError, }: IUseImplerProps) { const [uuid] = useState(generateUuid()); const [isImplerInitiated, setIsImplerInitiated] = useState(false); + const onWidgetReadyCalledRef = useRef(false); + + /* + * Use a ref so the listener always calls the latest version of callbacks + * without needing to re-register with the embed library + */ + const onEventHappenRef = useRef<(eventData: EventCalls) => void>(() => {}); const onEventHappen = useCallback( (eventData: EventCalls) => { switch (eventData.type) { + case EventTypes.WIDGET_READY: + if (onWidgetReady && !onWidgetReadyCalledRef.current) { + onWidgetReadyCalledRef.current = true; + onWidgetReady(); + } + break; case EventTypes.UPLOAD_STARTED: if (onUploadStart) onUploadStart(eventData.value); break; @@ -35,6 +51,12 @@ export function useImpler({ case EventTypes.UPLOAD_COMPLETED: if (onUploadComplete) onUploadComplete(eventData.value); break; + case EventTypes.UPLOAD_SUCCESS: + if (onUploadSuccess) onUploadSuccess(eventData.value); + break; + case EventTypes.UPLOAD_ERROR: + if (onUploadError) onUploadError(eventData.value); + break; case EventTypes.DATA_IMPORTED: if (onDataImported) onDataImported(eventData.value); break; @@ -46,13 +68,32 @@ export function useImpler({ break; } }, - [onUploadComplete, onUploadStart, onUploadTerminate, onWidgetClose] + [ + onUploadComplete, + onUploadStart, + onUploadTerminate, + onWidgetClose, + onDataImported, + onImportJobCreated, + onWidgetReady, + onUploadSuccess, + onUploadError, + ] ); + // Keep ref in sync with latest callback + useEffect(() => { + onEventHappenRef.current = onEventHappen; + }, [onEventHappen]); + useEffect(() => { const readyCheckInterval = setInterval(() => { if (window.impler && window.impler.isReady()) { setIsImplerInitiated(true); + if (onWidgetReady && !onWidgetReadyCalledRef.current) { + onWidgetReadyCalledRef.current = true; + onWidgetReady(); + } clearInterval(readyCheckInterval); } }, 1000); @@ -66,7 +107,11 @@ export function useImpler({ if (!window.impler) logError('IMPLER_UNDEFINED_ERROR'); else { window.impler.init(uuid); - window.impler.on('message', onEventHappen, uuid); + /* + * Use a stable wrapper that delegates to the ref, + * so the embed always calls the latest version of onEventHappen + */ + window.impler.on('message', (data: EventCalls) => onEventHappenRef.current(data), uuid); } }, []);
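
The streaming changes above (the Papa.parse step pause/resume in base-review.usecase.ts and the cursor-backed Readable in dal.service.ts) both apply the same write-backpressure idiom: stop producing when the consumer's buffer is full, resume once it drains. A minimal standalone sketch of that idiom, assuming plain Node streams (`pipeWithBackpressure` is an illustrative helper, not code from this diff):

```ts
import { Readable, Writable } from 'stream';

// Pause the producer when write() signals a full buffer; resume on 'drain'.
function pipeWithBackpressure(source: Readable, sink: Writable): Promise<void> {
  return new Promise((resolve, reject) => {
    source.on('data', (chunk) => {
      const canContinue = sink.write(chunk);
      if (!canContinue) {
        source.pause(); // stop reading until the sink catches up
        sink.once('drain', () => source.resume());
      }
    });
    source.once('end', () => sink.end(resolve));
    source.once('error', reject);
    sink.once('error', reject);
  });
}
```

For plain stream-to-stream plumbing, Node's built-in `pipeline` from 'stream/promises' does this bookkeeping automatically; the manual form matters here because Papa.parse's step callback is not a stream and has to be paused explicitly.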
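The send-webhook-data.consumer.ts rewrite bounds the work done per queue message: it sends at most PAGES_TO_PROCESS (50) pages, then re-publishes the remaining range so the message can be acked and the connection can heartbeat between batches. A condensed sketch of that control flow, with hypothetical callbacks standing in for the consumer's private methods (`sendPage`, `requeue`, and `finalize` are placeholders, not codebase APIs):

```ts
const PAGES_TO_PROCESS = 50; // mirrors the constant introduced in the diff

async function drainInBatches(
  totalPages: number,
  startPage: number,
  sendPage: (page: number) => Promise<void>, // one webhook POST per page
  requeue: (nextPage: number) => void, // publish the next batch back to the queue
  finalize: () => void // cleanup after the last page
): Promise<void> {
  let page = startPage;
  // Handle a bounded window of pages, then yield back to the queue.
  while (page <= totalPages && page < startPage + PAGES_TO_PROCESS) {
    await sendPage(page);
    page += 1;
  }
  if (page <= totalPages) requeue(page); // more work: re-enqueue with a page cursor
  else finalize(); // done: finalize the upload
}
```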
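The UploadStatusSuccess/UploadStatusError helpers added in parent-window.ts post a tagged envelope (`source: 'impler-embed'`, `type: 'UPLOAD_STATUS'`) to the host page. A hypothetical host-side listener matching that payload shape (illustrative only, not part of the published SDK):

```ts
window.addEventListener('message', (event: MessageEvent) => {
  const msg = event.data;
  // Ignore unrelated postMessage traffic from other embeds or extensions.
  if (!msg || msg.source !== 'impler-embed' || msg.type !== 'UPLOAD_STATUS') return;

  if (msg.payload.status === 'SUCCESS') {
    const { uploadId, rowCount, timestamp } = msg.payload.meta;
    console.log(`Upload ${uploadId} finished with ${rowCount} rows at ${timestamp}`);
  } else {
    const { errorCode, errorMessage } = msg.payload.meta;
    console.error(`Upload failed (${errorCode}): ${errorMessage}`);
  }
});
```

Because the widget posts with targetOrigin '*', a production listener should also verify event.origin against the expected widget URL before trusting the payload.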