From 64a1504e8fe5aa791852e4c2d7813a993b4350cf Mon Sep 17 00:00:00 2001 From: Andrew Boni Signori <61259237+andrewsignori-aot@users.noreply.github.com> Date: Tue, 17 Dec 2024 14:11:13 -0800 Subject: [PATCH 1/3] #4130 - Update Openshift Github Actions (#4131) - Added the [openshift-tools-installer](https://github.com/redhat-actions/openshift-tools-installer) to all action steps that require some OC iteration. - Using the [default options](https://github.com/redhat-actions/openshift-tools-installer?tab=readme-ov-file#inputs). - Using the fixed version "4". The current version printed is `OC CLI Version: Client Version: 4.17.9`. Please note, for steps printing the `OC version`, the new action was added before the print step. For steps not printing the `OC version`, the new step was added right before the "Log in to OpenShift". Sample workflow execution with the new step working: https://github.com/bcgov/SIMS/actions/runs/12378896945/job/34551860119 --- .github/workflows/clamav.yml | 4 ++ .github/workflows/crunchy-db.yml | 4 ++ .../env-setup-build-forms-server.yml | 5 +++ .github/workflows/env-setup-delete-redis.yml | 4 ++ .../env-setup-deploy-forms-server.yml | 5 +++ .github/workflows/env-setup-deploy-redis.yml | 4 ++ .../workflows/env-setup-deploy-secrets.yml | 4 ++ .../env-setup-init-redis-cluster-redis.yml | 4 ++ .../workflows/env-setup-redis-recovery.yml | 4 ++ .github/workflows/prune-images.yml | 14 +++++-- .github/workflows/release-build-all.yml | 26 +++++++++++- .github/workflows/release-deploy-all.yml | 40 ++++++++++++------- 12 files changed, 98 insertions(+), 20 deletions(-) diff --git a/.github/workflows/clamav.yml b/.github/workflows/clamav.yml index 079bb5d70b..a19d8164aa 100644 --- a/.github/workflows/clamav.yml +++ b/.github/workflows/clamav.yml @@ -33,6 +33,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: 
Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/crunchy-db.yml b/.github/workflows/crunchy-db.yml index 9bef25f727..9062198ece 100644 --- a/.github/workflows/crunchy-db.yml +++ b/.github/workflows/crunchy-db.yml @@ -38,6 +38,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/env-setup-build-forms-server.yml b/.github/workflows/env-setup-build-forms-server.yml index 1c4ca3000f..6d644f4277 100644 --- a/.github/workflows/env-setup-build-forms-server.yml +++ b/.github/workflows/env-setup-build-forms-server.yml @@ -18,10 +18,15 @@ jobs: BUILD_NAMESPACE: ${{ vars.BUILD_NAMESPACE }} FORMIO_SOURCE_REPO_TAG: ${{ inputs.formioTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: $BUILD_REF + echo OC CLI Version: $(oc version) # Checkout the PR branch - name: Checkout Target Branch uses: actions/checkout@v4 diff --git a/.github/workflows/env-setup-delete-redis.yml b/.github/workflows/env-setup-delete-redis.yml index 3b48c556f3..54c5e1aac2 100644 --- a/.github/workflows/env-setup-delete-redis.yml +++ b/.github/workflows/env-setup-delete-redis.yml @@ -23,6 +23,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/env-setup-deploy-forms-server.yml 
b/.github/workflows/env-setup-deploy-forms-server.yml index 0266a4ee6c..4cf7a04751 100644 --- a/.github/workflows/env-setup-deploy-forms-server.yml +++ b/.github/workflows/env-setup-deploy-forms-server.yml @@ -32,10 +32,15 @@ jobs: TLS_KEY: ${{ secrets.TLS_KEY }} TLS_CA_CERTIFICATE: ${{ secrets.TLS_CA_CERTIFICATE }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo NAMESPACE: $NAMESPACE echo HOST_PREFIX: $HOST_PREFIX + echo OC CLI Version: $(oc version) # Checkout the PR branch - name: Checkout Target Branch uses: actions/checkout@v4 diff --git a/.github/workflows/env-setup-deploy-redis.yml b/.github/workflows/env-setup-deploy-redis.yml index 2b187f4684..909918b7c1 100644 --- a/.github/workflows/env-setup-deploy-redis.yml +++ b/.github/workflows/env-setup-deploy-redis.yml @@ -23,6 +23,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/env-setup-deploy-secrets.yml b/.github/workflows/env-setup-deploy-secrets.yml index afdff881ba..9b299624d2 100644 --- a/.github/workflows/env-setup-deploy-secrets.yml +++ b/.github/workflows/env-setup-deploy-secrets.yml @@ -60,6 +60,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/env-setup-init-redis-cluster-redis.yml b/.github/workflows/env-setup-init-redis-cluster-redis.yml index b7d2c1fa31..9c29928449 100644 --- 
a/.github/workflows/env-setup-init-redis-cluster-redis.yml +++ b/.github/workflows/env-setup-init-redis-cluster-redis.yml @@ -23,6 +23,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/env-setup-redis-recovery.yml b/.github/workflows/env-setup-redis-recovery.yml index 7a285b94d2..0ca66e3ff0 100644 --- a/.github/workflows/env-setup-redis-recovery.yml +++ b/.github/workflows/env-setup-redis-recovery.yml @@ -27,6 +27,10 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} diff --git a/.github/workflows/prune-images.yml b/.github/workflows/prune-images.yml index 0b1abde9f7..286ebfa560 100644 --- a/.github/workflows/prune-images.yml +++ b/.github/workflows/prune-images.yml @@ -18,9 +18,9 @@ on: required: true default: "web-sims, api-sims, queue-consumers-sims, workers-sims" ocjobs: - description: "Comma seperated list of job Image Streams to prune" - required: true - default: "db.migrations" + description: "Comma seperated list of job Image Streams to prune" + required: true + default: "db.migrations" prefix: description: "Branch prefix to restrict pruning to" required: false @@ -44,6 +44,11 @@ jobs: echo "PREFIX=${{ inputs.prefix || 'main' }}" >> $GITHUB_ENV echo "MIN_TAGS=${{ inputs.minTags || '10' }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Print env run: | echo "Environment: ${ENVIRONMENT}" @@ -51,6 +56,7 @@ jobs: echo "Jobs: 
${OCJOBS}" echo "Prefix: ${PREFIX}" echo "Minimum Tags: ${MIN_TAGS}" + echo "OC CLI Version: $(oc version)" - name: Checkout source code uses: actions/checkout@v4 @@ -80,4 +86,4 @@ jobs: --prefix=${PREFIX} \ --min_tags=${MIN_TAGS} \ --type=JOB - popd \ No newline at end of file + popd diff --git a/.github/workflows/release-build-all.yml b/.github/workflows/release-build-all.yml index fad7b5e3d1..02544957b4 100644 --- a/.github/workflows/release-build-all.yml +++ b/.github/workflows/release-build-all.yml @@ -26,7 +26,6 @@ jobs: echo Git Ref Name: ${{ github.ref_name }} echo Git Head: ${{ github.event.pull_request.head.sha }} echo Run Number: ${{ github.run_number }} - echo OC CLI Version: $(oc version) # Create new tag. createTag: @@ -64,10 +63,15 @@ jobs: env: BUILD_REF: ${{ needs.createTag.outputs.newTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: ${{ needs.createTag.outputs.newTag }} + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: @@ -88,10 +92,15 @@ jobs: env: BUILD_REF: ${{ needs.createTag.outputs.newTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: ${{ needs.createTag.outputs.newTag }} + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: @@ -112,10 +121,15 @@ jobs: env: BUILD_REF: ${{ needs.createTag.outputs.newTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: ${{ needs.createTag.outputs.newTag }} + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: 
actions/checkout@v4 with: @@ -136,10 +150,15 @@ jobs: env: BUILD_REF: ${{ needs.createTag.outputs.newTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: ${{ needs.createTag.outputs.newTag }} + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: @@ -160,10 +179,15 @@ jobs: env: BUILD_REF: ${{ needs.createTag.outputs.newTag }} steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD NAMESPACE: $BUILD_NAMESPACE echo BRANCH: ${{ needs.createTag.outputs.newTag }} + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: diff --git a/.github/workflows/release-deploy-all.yml b/.github/workflows/release-deploy-all.yml index 975d1fc242..e147db9228 100644 --- a/.github/workflows/release-deploy-all.yml +++ b/.github/workflows/release-deploy-all.yml @@ -106,21 +106,23 @@ jobs: environment: ${{ inputs.environment }} runs-on: ubuntu-latest steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo Deploy Environment: ${{ inputs.environment }} echo GIT REF: ${{ inputs.gitRef }} echo BUILD NAMESPACE: $BUILD_NAMESPACE - + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} - - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} - - name: Run db-migrations working-directory: "./devops/" run: | @@ -133,21 +135,23 @@ jobs: runs-on: ubuntu-latest needs: run-db-migrations steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print 
env run: | echo Deploy ENVIRONMENT: ${{ inputs.environment }} echo GIT REF: ${{ inputs.gitRef }} echo BUILD NAMESPACE: $BUILD_NAMESPACE - + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} - - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} - - name: Deploy SIMS-API working-directory: "./devops/" run: | @@ -162,20 +166,22 @@ jobs: runs-on: ubuntu-latest needs: run-db-migrations steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo BUILD ENVIRONMENT: ${{ inputs.environment }} echo GIT REF: ${{ inputs.gitRef }} - + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} - - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} - - name: Deploy Workers working-directory: "./devops/" run: | @@ -188,21 +194,23 @@ jobs: runs-on: ubuntu-latest needs: run-db-migrations steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo Deploy ENVIRONMENT: ${{ inputs.environment }} echo GIT REF: ${{ inputs.gitRef }} echo BUILD NAMESPACE: $BUILD_NAMESPACE - + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} - - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} - - name: Deploy Queue Consumers working-directory: "./devops/" run: | @@ -215,21 +223,23 @@ jobs: runs-on: ubuntu-latest needs: run-db-migrations steps: + - name: Install CLI tools from OpenShift Mirror + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" - name: Print env run: | echo Deploy ENVIRONMENT: 
${{ inputs.environment }} echo GIT REF: ${{ inputs.gitRef }} echo BUILD NAMESPACE: $BUILD_NAMESPACE - + echo OC CLI Version: $(oc version) - name: Checkout Target Branch uses: actions/checkout@v4 with: ref: ${{ inputs.gitRef }} - - name: Log in to OpenShift run: | oc login --token=${{ secrets.SA_TOKEN }} --server=${{ vars.OPENSHIFT_CLUSTER_URL }} - - name: Deploy Web/Frontend working-directory: "./devops/" run: | From 1eb990660915fe12c6c5159a358cac94bb4db91e Mon Sep 17 00:00:00 2001 From: Bidyashish Date: Tue, 17 Dec 2024 16:09:56 -0800 Subject: [PATCH 2/3] #4019 - Virus Scan False Positives (#4129) **Acceptance Criteria** - [X] Investigate pdfs failing virus scanning and fix - [X] Try to update clamav to the most updated version (nice to have) **Notes** MaxFiles 100 was causing issue with file being not scanned and using ClamAV virus Database bank to not scan file. ` MaxFiles in ClamAV's configuration refers to the maximum number of files to be scanned within an archive, document, or any other container file. Here's a detailed explanation: For example: If scanning a ZIP file containing 15,000 files with MaxFiles 10000: Only the first 10,000 files will be scanned The remaining 5,000 files will be skipped If AlertExceedsMax is enabled, it will trigger a "Heuristics.Limits.Exceeded.MaxFiles" alert ` Update Clam AV Docker from BCGOV Repo Link: https://github.com/bcgov/common-hosted-clamav-service/pkgs/container/clamav-unprivileged Demo: Manual test in Dev using Config update. 
![image](https://github.com/user-attachments/assets/96635467-caf6-4db7-adc7-828567d5a963) --- devops/helm/clam-av/_clamav/configurations/1.0/clamd.conf | 2 +- devops/helm/clam-av/_clamav/values.yaml | 2 +- sources/packages/clam-av/clamd.conf | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/devops/helm/clam-av/_clamav/configurations/1.0/clamd.conf b/devops/helm/clam-av/_clamav/configurations/1.0/clamd.conf index 5e5feca208..26c437845d 100755 --- a/devops/helm/clam-av/_clamav/configurations/1.0/clamd.conf +++ b/devops/helm/clam-av/_clamav/configurations/1.0/clamd.conf @@ -562,7 +562,7 @@ MaxRecursion 10 # Note: disabling this limit or setting it too high may result in severe damage # to the system. # Default: 10000 -MaxFiles 100 +# MaxFiles 10000 # Maximum size of a file to check for embedded PE. Files larger than this value # will skip the additional analysis step. diff --git a/devops/helm/clam-av/_clamav/values.yaml b/devops/helm/clam-av/_clamav/values.yaml index a4062aad18..01c85d9eb1 100755 --- a/devops/helm/clam-av/_clamav/values.yaml +++ b/devops/helm/clam-av/_clamav/values.yaml @@ -2,7 +2,7 @@ replicaCount: 1 image: repository: ghcr.io/bcgov/clamav-unprivileged - tag: ca3d42f3dde3c5aa9bcab636f752119bbe6a67e8 # pragma: allowlist secret + tag: 3a352496562953dc0d371f265d122a6bc06b2b44 # pragma: allowlist secret pullPolicy: IfNotPresent priorityClassName: "" diff --git a/sources/packages/clam-av/clamd.conf b/sources/packages/clam-av/clamd.conf index ea771d9f75..4aef63a202 100644 --- a/sources/packages/clam-av/clamd.conf +++ b/sources/packages/clam-av/clamd.conf @@ -562,7 +562,7 @@ MaxRecursion 10 # Note: disabling this limit or setting it too high may result in severe damage # to the system. # Default: 10000 -MaxFiles 100 +# MaxFiles 10000 # Maximum size of a file to check for embedded PE. Files larger than this value # will skip the additional analysis step. 
From c8a8695a4f5de837dac58a386ac7ff8cc1821839 Mon Sep 17 00:00:00 2001 From: Andrew Boni Signori <61259237+andrewsignori-aot@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:05:38 -0800 Subject: [PATCH 3/3] #4076 - Queue Monitoring - Schedulers Refactor (SIN validation and loan balance) (#4118) - Refactored SIN validation-related schedulers and student loan balance import. - Adjusted E2E tests. --- ...plication-assessment.processor.e2e-spec.ts | 2 +- ...plication-assessment.processor.e2e-spec.ts | 2 +- .../src/processors/schedulers/base-queue.ts | 4 +- ...supplier-integration.scheduler.e2e-spec.ts | 10 +- ...response-integration.scheduler.e2e-spec.ts | 65 +++++------- ...alidation-process-integration.scheduler.ts | 69 ++++++------ ...-process-response-integration.scheduler.ts | 73 +++++-------- ...art-time-integration.scheduler.e2e-spec.ts | 47 +++----- ...alances-part-time-integration.scheduler.ts | 66 ++++-------- .../sin-validation.processing.service.ts | 100 ++++++++---------- ...tudent-loan-balances.processing.service.ts | 17 +-- 11 files changed, 180 insertions(+), 275 deletions(-) diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/cancel-application-assessment.processor.e2e-spec.ts b/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/cancel-application-assessment.processor.e2e-spec.ts index 3f7174581a..63e0c5fa91 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/cancel-application-assessment.processor.e2e-spec.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/cancel-application-assessment.processor.e2e-spec.ts @@ -179,7 +179,7 @@ describe( expect(result).toEqual([ "Assessment cancelled with success.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 1, Info: 7", + "Error(s): 0, Warning(s): 1, Info: 8", ]); expect( 
mockedJob.containLogMessage( diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/start-application-assessment.processor.e2e-spec.ts b/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/start-application-assessment.processor.e2e-spec.ts index 985f1ea16c..8532f33980 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/start-application-assessment.processor.e2e-spec.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/assessment/_tests_/start-application-assessment.processor.e2e-spec.ts @@ -84,7 +84,7 @@ describe( expect(result).toEqual([ "Workflow process not executed due to the assessment not being in the correct status.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 2, Info: 2", + "Error(s): 0, Warning(s): 2, Info: 3", ]); expect( mockedJob.containLogMessages([ diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/base-queue.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/base-queue.ts index af8320f06a..7d775268c5 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/base-queue.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/base-queue.ts @@ -26,7 +26,9 @@ export abstract class BaseQueue { async processQueue(job: Job): Promise { const processSummary = new ProcessSummary(); try { - this.logger.log(`Processing queue ${job.queue.name}, job ID ${job.id}.`); + processSummary.info( + `Processing queue ${job.queue.name}, job ID ${job.id}.`, + ); const result = await this.process(job, processSummary); processSummary.info(`${job.queue.name}, job ID ${job.id}, executed.`); const logsSum = processSummary.getLogLevelSum(); diff --git 
a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/cas-integration/_tests_/cas-supplier-integration.scheduler.e2e-spec.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/cas-integration/_tests_/cas-supplier-integration.scheduler.e2e-spec.ts index 70cf892caa..35c28afe1a 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/cas-integration/_tests_/cas-supplier-integration.scheduler.e2e-spec.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/cas-integration/_tests_/cas-supplier-integration.scheduler.e2e-spec.ts @@ -183,7 +183,7 @@ describe(describeProcessorRootTest(QueueNames.CASSupplierIntegration), () => { "Pending suppliers to update found: 1.", "Records updated: 1.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 1, Info: 11", + "Error(s): 0, Warning(s): 1, Info: 12", ]); expect( mockedJob.containLogMessages([ @@ -247,7 +247,7 @@ describe(describeProcessorRootTest(QueueNames.CASSupplierIntegration), () => { "Pending suppliers to update found: 1.", "Records updated: 1.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 1, Info: 11", + "Error(s): 0, Warning(s): 1, Info: 12", ]); expect( mockedJob.containLogMessages([ @@ -710,7 +710,7 @@ describe(describeProcessorRootTest(QueueNames.CASSupplierIntegration), () => { "Pending suppliers to update found: 1.", "Records updated: 0.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 1, Info: 13", + "Error(s): 0, Warning(s): 1, Info: 14", ]); // Assert DB was updated. 
const updateCASSupplier = await db.casSupplier.findOne({ @@ -778,7 +778,7 @@ describe(describeProcessorRootTest(QueueNames.CASSupplierIntegration), () => { "Pending suppliers to update found: 1.", "Records updated: 0.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 2, Info: 12", + "Error(s): 0, Warning(s): 2, Info: 13", ]); // Assert DB was updated. const updateCASSupplier = await db.casSupplier.findOne({ @@ -842,7 +842,7 @@ describe(describeProcessorRootTest(QueueNames.CASSupplierIntegration), () => { "Pending suppliers to update found: 1.", "Records updated: 0.", "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", - "Error(s): 0, Warning(s): 2, Info: 12", + "Error(s): 0, Warning(s): 2, Info: 13", ]); // Assert DB was updated. const updateCASSupplier = await db.casSupplier.findOne({ diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/_tests_/sin-validation-process-response-integration.scheduler.e2e-spec.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/_tests_/sin-validation-process-response-integration.scheduler.e2e-spec.ts index a14adf28fc..0b477e08d2 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/_tests_/sin-validation-process-response-integration.scheduler.e2e-spec.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/_tests_/sin-validation-process-response-integration.scheduler.e2e-spec.ts @@ -1,9 +1,10 @@ -import { createMock, DeepMocked } from "@golevelup/ts-jest"; +import { DeepMocked } from "@golevelup/ts-jest"; import { INestApplication } from "@nestjs/common"; import { QueueNames } from 
"@sims/utilities"; import { createTestingAppModule, describeProcessorRootTest, + mockBullJob, } from "../../../../../../test/helpers"; import { E2EDataSources, @@ -18,7 +19,6 @@ import { getStructuredRecords, mockDownloadFiles, } from "@sims/test-utils/mocks"; -import { Job } from "bull"; const SIN_VALIDATION_FILENAME = "PCSLP.PBC.BC0000.ISR"; @@ -55,11 +55,6 @@ describe( isValidSIN: true, }, }); - - // Queued job. - const job = createMock>(); - mockDownloadFiles(sftpClientMock, [SIN_VALIDATION_FILENAME]); - mockDownloadFiles( sftpClientMock, [SIN_VALIDATION_FILENAME], @@ -74,26 +69,28 @@ describe( }, ); + // Queued job. + const mockedJob = mockBullJob(); + // Act - const processResult = await processor.processSINValidationResponse(job); + const processResult = await processor.processQueue(mockedJob.job); + // Assert const downloadedFile = path.join( process.env.ESDC_RESPONSE_FOLDER, SIN_VALIDATION_FILENAME, ); - - // Assert expect(processResult).toStrictEqual([ - { - processSummary: [ - `Processing file ${downloadedFile}.`, - "File contains 2 SIN validations.", - "Processed SIN validation record from line 2: No SIN validation was updated because the record id is already present and this is not the most updated.", - "Processed SIN validation record from line 3: Not able to find the SIN validation on line number 3 to be updated with the ESDC response.", - ], - errorsSummary: [], - }, + "ESDC SIN validation response files processed.", ]); + expect( + mockedJob.containLogMessages([ + `Processing file ${downloadedFile}.`, + "File contains 2 SIN validations.", + "Processed SIN validation record from line 2: No SIN validation was updated because the record id is already present and this is not the most updated.", + "Processed SIN validation record from line 3: Not able to find the SIN validation on line number 3 to be updated with the ESDC response.", + ]), + ).toBe(true); }); it("Should update one SIN validation record and skip one when one SIN response is from SIMS and 
the other is from SFAS.", async () => { @@ -106,11 +103,6 @@ describe( dateReceived: null, }, }); - - // Queued job. - const job = createMock>(); - mockDownloadFiles(sftpClientMock, [SIN_VALIDATION_FILENAME]); - mockDownloadFiles( sftpClientMock, [SIN_VALIDATION_FILENAME], @@ -125,26 +117,27 @@ describe( }, ); + // Queued job. + const mockedJob = mockBullJob(); + // Act - const processResult = await processor.processSINValidationResponse(job); + const processResult = await processor.processQueue(mockedJob.job); // Assert const downloadedFile = path.join( process.env.ESDC_RESPONSE_FOLDER, SIN_VALIDATION_FILENAME, ); - - // Assert expect(processResult).toStrictEqual([ - { - processSummary: [ - `Processing file ${downloadedFile}.`, - "File contains 2 SIN validations.", - "Processed SIN validation record from line 2: SIN validation record updated.", - "Processed SIN validation record from line 3: Not able to find the SIN validation on line number 3 to be updated with the ESDC response.", - ], - errorsSummary: [], - }, + "ESDC SIN validation response files processed.", ]); + expect( + mockedJob.containLogMessages([ + `Processing file ${downloadedFile}.`, + "File contains 2 SIN validations.", + "Processed SIN validation record from line 2: SIN validation record updated.", + "Processed SIN validation record from line 3: Not able to find the SIN validation on line number 3 to be updated with the ESDC response.", + ]), + ).toBe(true); }); }, ); diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-integration.scheduler.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-integration.scheduler.ts index be27abdee2..e630848eef 100644 --- 
a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-integration.scheduler.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-integration.scheduler.ts @@ -1,11 +1,14 @@ -import { InjectQueue, Process, Processor } from "@nestjs/bull"; +import { InjectQueue, Processor } from "@nestjs/bull"; import { SINValidationProcessingService } from "@sims/integrations/esdc-integration"; import { QueueService } from "@sims/services/queue"; import { QueueNames } from "@sims/utilities"; import { Job, Queue } from "bull"; -import { QueueProcessSummary } from "../../../models/processors.models"; import { BaseScheduler } from "../../base-scheduler"; -import { ESDCFileResult } from "../models/esdc.models"; +import { + InjectLogger, + LoggerService, + ProcessSummary, +} from "@sims/utilities/logger"; @Processor(QueueNames.SINValidationProcessIntegration) export class SINValidationProcessIntegrationScheduler extends BaseScheduler { @@ -18,47 +21,35 @@ export class SINValidationProcessIntegrationScheduler extends BaseScheduler { - throw new Error("Method not implemented."); - } - - /** - * When implemented in a derived class, process the queue job. - * To be implemented. - */ - protected async process(): Promise { - throw new Error("Method not implemented."); - } - /** * Identifies all the students that still do not have their SIN * validated and create the validation request for ESDC processing. - * @params job job details. + * @param _job job details. + * @param processSummary process summary for logging. * @returns processing result log. 
*/ - @Process() - async processSINValidation(job: Job): Promise { - const summary = new QueueProcessSummary({ - appLogger: this.logger, - jobLogger: job, - }); - await summary.info( - `Processing SIN validation integration job ${job.id} of type ${job.name}.`, - ); - await summary.info("Sending ESDC SIN validation request file."); + protected async process( + _job: Job, + processSummary: ProcessSummary, + ): Promise { + const childProcessSummary = new ProcessSummary(); + processSummary.children(childProcessSummary); const uploadResult = - await this.sinValidationProcessingService.uploadSINValidationRequests(); - await summary.info("ESDC SIN validation request file sent."); - await summary.info( - `Completed SIN validation integration job ${job.id} of type ${job.name}.`, - ); - return { - generatedFile: uploadResult.generatedFile, - uploadedRecords: uploadResult.uploadedRecords, - }; + await this.sinValidationProcessingService.uploadSINValidationRequests( + childProcessSummary, + ); + return [ + `Generated file: ${uploadResult.generatedFile}`, + `Uploaded records: ${uploadResult.uploadedRecords}`, + ]; } + + /** + * Setting the logger here allows the correct context to be set + * during the property injection. + * Even if the logger is not used, it is required to be set, to + * allow the base classes to write logs using the correct context. 
+ */ + @InjectLogger() + logger: LoggerService; } diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-response-integration.scheduler.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-response-integration.scheduler.ts index 2eb7ecb561..25d017e210 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-response-integration.scheduler.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/sin-validation-integration/sin-validation-process-response-integration.scheduler.ts @@ -1,12 +1,14 @@ -import { InjectQueue, Process, Processor } from "@nestjs/bull"; +import { InjectQueue, Processor } from "@nestjs/bull"; import { SINValidationProcessingService } from "@sims/integrations/esdc-integration"; import { QueueService } from "@sims/services/queue"; -import { SystemUsersService } from "@sims/services/system-users"; import { QueueNames } from "@sims/utilities"; import { Job, Queue } from "bull"; -import { QueueProcessSummary } from "../../../models/processors.models"; import { BaseScheduler } from "../../base-scheduler"; -import { ProcessResponseQueue } from "../models/esdc.models"; +import { + InjectLogger, + LoggerService, + ProcessSummary, +} from "@sims/utilities/logger"; @Processor(QueueNames.SINValidationResponseIntegration) export class SINValidationResponseIntegrationScheduler extends BaseScheduler { @@ -15,57 +17,34 @@ export class SINValidationResponseIntegrationScheduler extends BaseScheduler, queueService: QueueService, private readonly sinValidationProcessingService: SINValidationProcessingService, - private readonly systemUsersService: SystemUsersService, ) { super(schedulerQueue, queueService); } - /** - * To be removed once the 
method {@link process} is implemented. - * This method "hides" the {@link Process} decorator from the base class. - */ - async processQueue(): Promise { - throw new Error("Method not implemented."); - } - - /** - * When implemented in a derived class, process the queue job. - * To be implemented. - */ - protected async process(): Promise { - throw new Error("Method not implemented."); - } - /** * Download all SIN validation files from ESDC response folder on SFTP and process them all. - * @params job job details. + * @param _job process job. + * @param processSummary process summary for logging. * @returns summary with what was processed and the list of all errors, if any. */ - @Process() - async processSINValidationResponse( - job: Job, - ): Promise { - const summary = new QueueProcessSummary({ - appLogger: this.logger, - jobLogger: job, - }); - await summary.info( - `Processing SIN validation integration job ${job.id} of type ${job.name}.`, - ); - await summary.info("Processing ESDC SIN validation response files."); - const auditUser = this.systemUsersService.systemUser; - const results = await this.sinValidationProcessingService.processResponses( - auditUser.id, - ); - await summary.info("ESDC SIN validation response files processed."); - await summary.info( - `Completed SIN validation integration job ${job.id} of type ${job.name}.`, + protected async process( + _job: Job, + processSummary: ProcessSummary, + ): Promise { + const childProcessSummary = new ProcessSummary(); + processSummary.children(childProcessSummary); + await this.sinValidationProcessingService.processResponses( + childProcessSummary, ); - return results.map((result) => { - return { - processSummary: result.processSummary, - errorsSummary: result.errorsSummary, - }; - }); + return "ESDC SIN validation response files processed."; } + + /** + * Setting the logger here allows the correct context to be set + * during the property injection. 
+ * Even if the logger is not used, it is required to be set, to + * allow the base classes to write logs using the correct context. + */ + @InjectLogger() + logger: LoggerService; } diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/_tests_/student-loan-balances-part-time-integration.scheduler.e2e-spec.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/_tests_/student-loan-balances-part-time-integration.scheduler.e2e-spec.ts index 54675537b5..06133431fa 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/_tests_/student-loan-balances-part-time-integration.scheduler.e2e-spec.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/_tests_/student-loan-balances-part-time-integration.scheduler.e2e-spec.ts @@ -78,9 +78,7 @@ describe( const mockedJob = mockBullJob(); mockDownloadFiles(sftpClientMock, [STUDENT_LOAN_BALANCES_FILENAME]); // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); + const result = await processor.processQueue(mockedJob.job); // Assert expect(result.length).toBe(1); expect( @@ -89,7 +87,7 @@ describe( `Inserted Student Loan balances file ${STUDENT_LOAN_BALANCES_FILENAME}.`, ]), ).toBe(true); - expect(result).toContain("Process finalized with success."); + expect(result).toStrictEqual(["Process finalized with success."]); // Expect the file was archived on SFTP. 
expect(sftpClientMock.rename).toHaveBeenCalled(); const studentLoanBalance = await db.studentLoanBalance.find({ @@ -120,12 +118,10 @@ describe( STUDENT_LOAN_BALANCES_STUDENT_NOT_FOUND_FILENAME, ]); // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); + const result = await processor.processQueue(mockedJob.job); // Assert expect(result.length).toBe(1); - expect(result).toContain("Process finalized with success."); + expect(result).toStrictEqual(["Process finalized with success."]); expect( mockedJob.containLogMessages(["Student not found for line 2."]), ).toBe(true); @@ -147,19 +143,16 @@ describe( mockDownloadFiles(sftpClientMock, [ STUDENT_LOAN_BALANCES_RECORDS_MISMATCH_FILENAME, ]); - // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); - // Assert - expect(result.length).toBe(3); - expect(result).toContain( - "Attention, process finalized with success but some errors and/or warnings messages may require some attention.", + + // Act/Assert + await expect(processor.processQueue(mockedJob.job)).rejects.toThrowError( + "One or more errors were reported during the process, please see logs for details.", ); + // Assert log has the extra details. 
expect( - mockedJob.containLogMessages([ + mockedJob.containLogMessage( "Records in footer does not match the number of records.", - ]), + ), ).toBe(true); }); @@ -189,12 +182,10 @@ describe( mockDownloadFiles(sftpClientMock, [STUDENT_LOAN_BALANCES_FILENAME]); // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); + const result = await processor.processQueue(mockedJob.job); // Assert - expect(result).toContain("Process finalized with success."); + expect(result).toStrictEqual(["Process finalized with success."]); expect( mockedJob.containLogMessages([ "Checking if zero balance records must be inserted.", @@ -244,12 +235,10 @@ describe( mockDownloadFiles(sftpClientMock, [STUDENT_LOAN_BALANCES_FILENAME]); // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); + const result = await processor.processQueue(mockedJob.job); // Assert - expect(result).toContain("Process finalized with success."); + expect(result).toStrictEqual(["Process finalized with success."]); expect( mockedJob.containLogMessages([ "Checking if zero balance records must be inserted.", @@ -298,12 +287,10 @@ describe( mockDownloadFiles(sftpClientMock, [STUDENT_LOAN_BALANCES_FILENAME]); // Act - const result = await processor.processStudentLoanBalancesFiles( - mockedJob.job, - ); + const result = await processor.processQueue(mockedJob.job); // Assert - expect(result).toContain("Process finalized with success."); + expect(result).toStrictEqual(["Process finalized with success."]); expect( mockedJob.containLogMessages([ "Checking if zero balance records must be inserted.", diff --git a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/student-loan-balances-part-time-integration.scheduler.ts b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/student-loan-balances-part-time-integration.scheduler.ts index 
2fa94db6a0..2bf68adf2f 100644 --- a/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/student-loan-balances-part-time-integration.scheduler.ts +++ b/sources/packages/backend/apps/queue-consumers/src/processors/schedulers/esdc-integration/student-loan-balances/student-loan-balances-part-time-integration.scheduler.ts @@ -1,4 +1,4 @@ -import { InjectQueue, Process, Processor } from "@nestjs/bull"; +import { InjectQueue, Processor } from "@nestjs/bull"; import { Job, Queue } from "bull"; import { BaseScheduler } from "../../base-scheduler"; import { QueueNames } from "@sims/utilities"; @@ -8,12 +8,7 @@ import { LoggerService, ProcessSummary, } from "@sims/utilities/logger"; -import { - getSuccessMessageWithAttentionCheck, - logProcessSummaryToJobLogger, -} from "../../../../utilities"; import { StudentLoanBalancesProcessingService } from "@sims/integrations/esdc-integration"; -import { SystemUsersService } from "@sims/services"; /** * Process Student Loan Balances file from the SFTP location. @@ -25,60 +20,35 @@ export class StudentLoanBalancesPartTimeIntegrationScheduler extends BaseSchedul schedulerQueue: Queue, queueService: QueueService, private readonly studentLoanBalancesProcessingService: StudentLoanBalancesProcessingService, - private readonly systemUsersService: SystemUsersService, ) { super(schedulerQueue, queueService); } - /** - * To be removed once the method {@link process} is implemented. - * This method "hides" the {@link Process} decorator from the base class. - */ - async processQueue(): Promise { - throw new Error("Method not implemented."); - } - - /** - * When implemented in a derived class, process the queue job. - * To be implemented. - */ - protected async process(): Promise { - throw new Error("Method not implemented."); - } - /** * Process Student Loan Balances files from the SFTP * and update the database with processed records. * @param job Student Loan Balances job. 
+ * @param processSummary process summary. * @returns processing result. */ - @Process() - async processStudentLoanBalancesFiles(job: Job): Promise { - const processSummary = new ProcessSummary(); - try { - processSummary.info("Processing Student Loan Balances files."); - const auditUser = this.systemUsersService.systemUser; - const serviceProcessSummary = new ProcessSummary(); - processSummary.children(serviceProcessSummary); - await this.studentLoanBalancesProcessingService.processStudentLoanBalances( - serviceProcessSummary, - auditUser.id, - ); - processSummary.info("Completed processing Student Loan Balances files."); - return getSuccessMessageWithAttentionCheck( - ["Process finalized with success."], - processSummary, - ); - } catch (error: unknown) { - const errorMessage = "Unexpected error while executing the job."; - processSummary.error(errorMessage, error); - return [errorMessage]; - } finally { - this.logger.logProcessSummary(processSummary); - await logProcessSummaryToJobLogger(processSummary, job); - } + protected async process( + _job: Job, + processSummary: ProcessSummary, + ): Promise { + const serviceProcessSummary = new ProcessSummary(); + processSummary.children(serviceProcessSummary); + await this.studentLoanBalancesProcessingService.processStudentLoanBalances( + serviceProcessSummary, + ); + return "Process finalized with success."; } + /** + * Setting the logger here allows the correct context to be set + * during the property injection. + * Even if the logger is not used, it is required to be set, to + * allow the base classes to write logs using the correct context. 
+ */ @InjectLogger() logger: LoggerService; } diff --git a/sources/packages/backend/libs/integrations/src/esdc-integration/sin-validation/sin-validation.processing.service.ts b/sources/packages/backend/libs/integrations/src/esdc-integration/sin-validation/sin-validation.processing.service.ts index 3336e0345d..f0cea8b607 100644 --- a/sources/packages/backend/libs/integrations/src/esdc-integration/sin-validation/sin-validation.processing.service.ts +++ b/sources/packages/backend/libs/integrations/src/esdc-integration/sin-validation/sin-validation.processing.service.ts @@ -1,4 +1,8 @@ -import { LoggerService, InjectLogger } from "@sims/utilities/logger"; +import { + LoggerService, + InjectLogger, + ProcessSummary, +} from "@sims/utilities/logger"; import { Injectable } from "@nestjs/common"; import { SequenceControlService, SystemUsersService } from "@sims/services"; import { SINValidationIntegrationService } from "./sin-validation.integration.service"; @@ -8,7 +12,6 @@ import { SINValidationResponseResult, SINValidationUploadResult, } from "./models/sin-validation-models"; -import { ProcessSFTPResponseResult } from "../models/esdc-integration.model"; import * as path from "path"; import { EntityManager } from "typeorm"; import { ConfigService, ESDCIntegrationConfig } from "@sims/utilities/config"; @@ -17,7 +20,6 @@ import { SINValidationService, StudentService, } from "@sims/integrations/services"; -import { parseJSONError } from "@sims/utilities"; /** * Manages the process to generate SIN validations requests to ESDC and allow @@ -40,11 +42,14 @@ export class SINValidationProcessingService { /** * Identifies all the students that still do not have their SIN * validated and create the validation request for ESDC processing. + * @param processSummary process summary for logging. * @returns result of the upload operation. 
*/ - async uploadSINValidationRequests(): Promise { + async uploadSINValidationRequests( + processSummary: ProcessSummary, + ): Promise { const auditUser = this.systemUsersService.systemUser; - this.logger.log("Retrieving students with pending SIN validation..."); + processSummary.info("Retrieving students with pending SIN validation..."); const students = await this.studentService.getStudentsPendingSinValidation(); if (!students.length) { @@ -54,7 +59,7 @@ export class SINValidationProcessingService { }; } - this.logger.log(`Found ${students.length} student(s).`); + processSummary.info(`Found ${students.length} student(s).`); const sinValidationRecords = students.map((student) => { return this.createSINValidationRecordFromStudent(student); }); @@ -64,7 +69,7 @@ export class SINValidationProcessingService { ESDC_SIN_VALIDATION_SEQUENCE_GROUP_NAME, async (nextSequenceNumber: number, entityManager: EntityManager) => { try { - this.logger.log("Creating SIN validation file content."); + processSummary.info("Creating SIN validation file content."); const fileContent = this.sinValidationIntegrationService.createRequestFileContent( sinValidationRecords, @@ -78,7 +83,9 @@ export class SINValidationProcessingService { // Updates the records in SIN Validation table for the particular user. // If the upload fails the rollback will be executed on DB. 
- this.logger.log("Updating the records in the SIN Validation table."); + processSummary.info( + "Updating the records in the SIN Validation table.", + ); const sinValidationRepo = entityManager.getRepository(SINValidation); await this.sinValidationService.updateSentRecords( sinValidationRecords, @@ -86,28 +93,25 @@ auditUser.id, sinValidationRepo, ); - this.logger.log("SIN Validation table updated."); - - this.logger.log("Uploading content."); + processSummary.info("SIN Validation table updated."); + processSummary.info("Uploading content."); await this.sinValidationIntegrationService.uploadContent( fileContent, fileInfo.filePath, ); - this.logger.log("Content uploaded."); - + processSummary.info("Content uploaded."); uploadResult = { generatedFile: fileInfo.filePath, uploadedRecords: fileContent.length - 2, // Do not consider header and footer. }; - } catch (error) { - this.logger.error( - `Error while uploading content for SIN validation: ${error}`, - ); - throw error; + } catch (error: unknown) { + const errorMessage = + "Error while uploading content for SIN validation."; + this.logger.error(errorMessage, error); + throw new Error(errorMessage, { cause: error }); + } }, ); - return uploadResult; } @@ -131,13 +135,10 @@ /** * Download all SIN validation files from ESDC response folder on SFTP and process them all. - * @param auditUserId user that should be considered the one that is - * causing the changes. + * @param processSummary process summary for logging. * @returns summary with what was processed and the list of all errors, if any. 
*/ - async processResponses( - auditUserId: number, - ): Promise { + async processResponses(processSummary: ProcessSummary): Promise { const remoteFilePaths = await this.sinValidationIntegrationService.getResponseFilesFullPath( this.esdcConfig.ftpResponseFolder, @@ -146,48 +147,42 @@ export class SINValidationProcessingService { "i", ), ); - const processFiles: ProcessSFTPResponseResult[] = []; for (const remoteFilePath of remoteFilePaths) { - processFiles.push(await this.processFile(remoteFilePath, auditUserId)); + const childProcessSummary = new ProcessSummary(); + processSummary.children(childProcessSummary); + await this.processFile(remoteFilePath, childProcessSummary); } - return processFiles; } /** * Process each individual ESDC SIN validation response file from the SFTP. * @param remoteFilePath ESDC SIN validation response file to be processed. - * @param auditUserId user that should be considered the one that is - * causing the changes. + * @param processSummary process summary for logging. * @returns process summary and errors summary. */ private async processFile( remoteFilePath: string, - auditUserId: number, - ): Promise { - const result = new ProcessSFTPResponseResult(); - result.processSummary.push(`Processing file ${remoteFilePath}.`); - + processSummary: ProcessSummary, + ): Promise { + processSummary.info(`Processing file ${remoteFilePath}.`); let responseResult: SINValidationResponseResult; - try { responseResult = await this.sinValidationIntegrationService.downloadResponseFile( remoteFilePath, ); - } catch (error) { - this.logger.error(error); - result.errorsSummary.push( - `Error downloading file ${remoteFilePath}. Error: ${error}`, - ); + } catch (error: unknown) { + const errorMessage = `Error downloading file ${remoteFilePath}.`; + this.logger.error(errorMessage, error); // Abort the process nicely not throwing an exception and // allowing other response files to be processed. 
- return result; + processSummary.error(errorMessage, error); + return; } - - result.processSummary.push( + // File downloaded successfully. + processSummary.info( `File contains ${responseResult.records.length} SIN validations.`, ); - // Get only the file name for logging. const fileName = path.basename(remoteFilePath); for (const sinValidationRecord of responseResult.records) { @@ -197,33 +192,30 @@ export class SINValidationProcessingService { sinValidationRecord, fileName, responseResult.header.processDate, - auditUserId, + this.systemUsersService.systemUser.id, ); - result.processSummary.push( + processSummary.info( `Processed SIN validation record from line ${sinValidationRecord.lineNumber}: ${updatedResult.operationDescription}`, ); - } catch (error) { + } catch (error: unknown) { // Log the error but allow the process to continue. const errorDescription = `Error processing record line number ${sinValidationRecord.lineNumber} from file ${fileName}`; - result.errorsSummary.push(errorDescription); - this.logger.error(`${errorDescription}. ${error}`); + processSummary.error(errorDescription, error); + this.logger.error(errorDescription, error); } } try { // Archive file. await this.sinValidationIntegrationService.archiveFile(remoteFilePath); - } catch (error) { + } catch (error: unknown) { // Log the error but allow the process to continue. // If there was an issue only during the file archiving, it will be // processed again and could be archived in the second attempt. 
const logMessage = `Error while archiving ESDC SIN validation response file: ${remoteFilePath}.`; - result.errorsSummary.push(logMessage); - result.errorsSummary.push(parseJSONError(error)); + processSummary.error(logMessage, error); this.logger.error(logMessage, error); } - - return result; } @InjectLogger() diff --git a/sources/packages/backend/libs/integrations/src/esdc-integration/student-loan-balances/student-loan-balances.processing.service.ts b/sources/packages/backend/libs/integrations/src/esdc-integration/student-loan-balances/student-loan-balances.processing.service.ts index c04d18b355..3a0df377e4 100644 --- a/sources/packages/backend/libs/integrations/src/esdc-integration/student-loan-balances/student-loan-balances.processing.service.ts +++ b/sources/packages/backend/libs/integrations/src/esdc-integration/student-loan-balances/student-loan-balances.processing.service.ts @@ -13,9 +13,9 @@ import { DataSource } from "typeorm"; import { DatabaseConstraintNames, StudentLoanBalance, - User, isDatabaseConstraintError, } from "@sims/sims-db"; +import { SystemUsersService } from "@sims/services"; /** * Manages to process the Student Loan Balances files @@ -30,6 +30,7 @@ export class StudentLoanBalancesProcessingService { private readonly studentLoanBalancesIntegrationService: StudentLoanBalancesIntegrationService, private readonly studentService: StudentService, private readonly studentLoanBalanceService: StudentLoanBalanceService, + private readonly systemUsersService: SystemUsersService, ) { this.esdcConfig = config.esdcIntegration; } @@ -37,12 +38,10 @@ export class StudentLoanBalancesProcessingService { /** * Download all files from SFTP and process them all. * @param parentProcessSummary parent process summary. - * @param auditUserId user that should be considered the one that is * causing the changes. 
*/ async processStudentLoanBalances( parentProcessSummary: ProcessSummary, - auditUserId: number, ): Promise { // Process summary to be populated by each enqueueing workflow call. const remoteFilePaths = @@ -56,24 +55,17 @@ export class StudentLoanBalancesProcessingService { for (const remoteFilePath of remoteFilePaths) { const fileProcessingSummary = new ProcessSummary(); parentProcessSummary.children(fileProcessingSummary); - await this.processFile( - remoteFilePath, - fileProcessingSummary, - auditUserId, - ); + await this.processFile(remoteFilePath, fileProcessingSummary); } } /** * Process each individual Student Loan Balances response file from the SFTP. * @param remoteFilePath Student Loan Balances response file to be processed. - * @param auditUserId user that should be considered the one that is - * causing the changes. */ private async processFile( remoteFilePath: string, childrenProcessSummary: ProcessSummary, - auditUserId: number, ): Promise { childrenProcessSummary.info(`Processing file ${remoteFilePath}.`); let studentLoanBalancesSFTPResponseFile: StudentLoanBalancesSFTPResponseFile; @@ -106,14 +98,13 @@ export class StudentLoanBalancesProcessingService { ); continue; } - const auditUser = { id: auditUserId } as User; await studentLoanBalancesRepo.insert({ student: student, cslBalance: studentLoanBalanceRecord.cslBalance, balanceDate: getISODateOnlyString( studentLoanBalancesSFTPResponseFile.header.balanceDate, ), - creator: auditUser, + creator: this.systemUsersService.systemUser, }); } childrenProcessSummary.info(