diff --git a/.github/workflows/pagecountcalculator-cd.yml b/.github/workflows/pagecountcalculator-cd.yml new file mode 100644 index 000000000..47976ada7 --- /dev/null +++ b/.github/workflows/pagecountcalculator-cd.yml @@ -0,0 +1,113 @@ +name: PageCountCalculator CD + + +on: + push: + branches: + - dev + - main + - dev-marshal + - test-marshal + - dev-rook + - test-rook + paths: + - "computingservices/PageCountCalculator/**" + - ".github/workflows/pagecountcalculator-cd.yml" + +defaults: + run: + shell: bash + working-directory: ./computingservices/PageCountCalculator + +env: + APP_NAME: "reviewer-pagecountcalculator" + TOOLS_NAME: "${{secrets.OPENSHIFT4_REPOSITORY}}" + +jobs: + pagecountcalculator-cd-by-push: + runs-on: ubuntu-20.04 + + if: github.event_name == 'push' && github.repository == 'bcgov/foi-docreviewer' + steps: + - uses: actions/checkout@v2 + - name: Set ENV variables for dev branch + if: ${{ github.ref_name == 'dev' }} + shell: bash + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev" >> $GITHUB_ENV + echo "BRANCH_NAME=dev" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + + - name: Set ENV variables for main branch + if: ${{ github.ref_name == 'main' }} + shell: bash + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test" >> $GITHUB_ENV + echo "BRANCH_NAME=main" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + + - name: Set ENV variables for dev-marshal branch + if: ${{ github.ref_name == 'dev-marshal' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev-marshal" >> $GITHUB_ENV + echo "BRANCH_NAME=dev-marshal" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + + - name: Set ENV variables for test-marshal branch + if: ${{ github.ref_name == 'test-marshal' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test-marshal" >> $GITHUB_ENV + echo "BRANCH_NAME=test-marshal" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + + - name: Set ENV variables for dev-rook branch + if: ${{ github.ref_name == 'dev-rook' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev-rook" >> $GITHUB_ENV + echo "BRANCH_NAME=dev-rook" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + echo "ENV_TAG_NAME=rook" >> $GITHUB_ENV + + - name: Set ENV variables for test-rook branch + if: ${{ github.ref_name == 'test-rook' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test-rook" >> $GITHUB_ENV + echo "BRANCH_NAME=test-rook" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + echo "ENV_TAG_NAME=rook" >> $GITHUB_ENV + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: Tools project + shell: bash + run: | + oc project ${{ env.TOOLS_NAME }}-tools + + - name: Build from ${{ env.BRANCH_NAME }} branch + shell: bash + run: | + oc patch bc/${{ env.APP_NAME }}-build -p '{"spec":{"source":{"contextDir":"/computingservices/PageCountCalculator","git":{"ref":"${{ env.BRANCH_NAME }}"}}}}' + + - name: Start Build Openshift + shell: bash + run: | + oc start-build ${{ env.APP_NAME }}-build --wait + + - name: Tag+Deploy for ${{ env.TAG_NAME }} + shell: bash + run: | + oc tag ${{ env.APP_NAME }}:latest ${{ env.APP_NAME }}:${{ env.TAG_NAME }} + + # - name: Watch new rollout (trigger by image change in Openshift) + # shell: bash + # run: | + # oc rollout status dc/${{ env.APP_NAME }}-{{ env.ENV_TAG_NAME }} -n ${{ env.TOOLS_NAME }}-${{ env.ENV_NAME }} -w diff --git 
a/.github/workflows/pagecountcalculator-ci.yml b/.github/workflows/pagecountcalculator-ci.yml new file mode 100644 index 000000000..4a521a113 --- /dev/null +++ b/.github/workflows/pagecountcalculator-ci.yml @@ -0,0 +1,54 @@ +name: PageCountCalculator CI + + +on: + pull_request: + branches: + - main + - dev + - dev-marshal + - test-marshal + - dev-rook + - test-rook + paths: + - "computingservices/PageCountCalculator/**" + +defaults: + run: + shell: bash + working-directory: ./computingservices/PageCountCalculator + +jobs: + docker-build-check: + runs-on: ubuntu-20.04 + name: Build dockerfile to ensure it works + + steps: + - uses: actions/checkout@v2 + - name: docker build to check strictness + id: docker-build + run: | + docker build -f Dockerfile.local . + + python-build-check: + runs-on: ubuntu-20.04 + name: Build python to ensure it works + + strategy: + matrix: + # python-version: [3.6, 3.7, 3.8, 3.9] + python-version: [3.9] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 pytest + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + diff --git a/.gitignore b/.gitignore index 3b52caa4c..3e5f06c34 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,7 @@ computingservices/poc/env/* computingservices/DedupeServices/env/* computingservices/PDFStitchServices/env/* computingservices/ZippingServices/env/* +computingservices/PageCountCalculator/env/* ################################################################################ diff --git a/api/migrations/versions/18a45d1b33cc_DocumentDeletedPages.py b/api/migrations/versions/18a45d1b33cc_DocumentDeletedPages.py new file mode 100644 index 000000000..2e3939190 --- /dev/null +++ b/api/migrations/versions/18a45d1b33cc_DocumentDeletedPages.py @@ -0,0 +1,40 @@ +"""empty message + +Revision ID: 18a45d1b33cc +Revises: c787e6d82903 +Create Date: 2024-03-07 10:00:01.341943 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '18a45d1b33cc' +down_revision = 'c787e6d82903' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('DocumentDeletedPages', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('ministryrequestid', sa.Integer(), nullable=False), + sa.Column('redactionlayerid', sa.Integer(), nullable=False), + sa.Column('documentid', sa.Integer(), nullable=False), + sa.Column('pagemetadata', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('createdby', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.add_column('Documents', sa.Column('originalpagecount', sa.Integer, unique=False, nullable=True)) + op.execute('UPDATE "Documents" SET originalpagecount = pagecount, updatedby= \'{"user":"System"}\', updated_at=now();commit;') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust!
### + op.drop_table('DocumentDeletedPages') + op.drop_column('Documents', 'originalpagecount') + # ### end Alembic commands ### diff --git a/api/migrations/versions/c787e6d82903_PageCalculatorJob.py b/api/migrations/versions/c787e6d82903_PageCalculatorJob.py new file mode 100644 index 000000000..880206f08 --- /dev/null +++ b/api/migrations/versions/c787e6d82903_PageCalculatorJob.py @@ -0,0 +1,36 @@ +"""empty message + +Revision ID: c787e6d82903 +Revises: e0e3a10b850d +Create Date: 2024-02-06 15:12:03.310271 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. +revision = 'c787e6d82903' +down_revision = 'e0e3a10b850d' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table('PageCalculatorJob', + sa.Column('pagecalculatorjobid', sa.Integer(), primary_key=True, autoincrement=True, nullable=False), + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('ministryrequestid', sa.Integer(), nullable=False), + sa.Column('inputmessage', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('pagecount', postgresql.JSON(astext_type=sa.Text()), nullable=True), + sa.Column('status', sa.String(length=120), nullable=False), + sa.Column('message', sa.Text, nullable=True), + sa.Column('createdat', sa.TIMESTAMP, nullable=False, server_default=sa.func.now()), + sa.Column('createdby', sa.String(length=120), nullable=True), + sa.PrimaryKeyConstraint('pagecalculatorjobid', 'version') + ) + + +def downgrade(): + op.drop_table('PageCalculatorJob') diff --git a/api/reviewer_api/models/DocumentDeletedPages.py b/api/reviewer_api/models/DocumentDeletedPages.py new file mode 100644 index 000000000..9ef0ef72f --- /dev/null +++ b/api/reviewer_api/models/DocumentDeletedPages.py @@ -0,0 +1,49 @@ +from .db import db, ma +from datetime import datetime as datetime2 +from sqlalchemy.dialects.postgresql import JSON, insert +from sqlalchemy import or_, and_, text +from .default_method_result import DefaultMethodResult +from reviewer_api.models.Documents import Document +import logging + +class DocumentDeletedPage(db.Model): + __tablename__ = 'DocumentDeletedPages' + # Defining the columns + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + redactionlayerid = db.Column(db.Integer, primary_key=True, nullable=False) + ministryrequestid = db.Column(db.Integer, nullable=False) + documentid = db.Column(db.Integer, nullable=False) + pagemetadata = db.Column(JSON, nullable=False) + created_at = db.Column(db.DateTime, default=datetime2.now) + createdby = db.Column(JSON, unique=False, nullable=False) + + + @classmethod + def create(cls, ministryrequestid, docpages, pagemappings) -> DefaultMethodResult: + try: + insertstmt = insert(DocumentDeletedPage).values(docpages) + db.session.execute(insertstmt) + db.session.bulk_update_mappings(Document, pagemappings) + db.session.commit() + return DefaultMethodResult(True, "Deleted page details saved", ministryrequestid) + except Exception as ex: + logging.error(ex) + return DefaultMethodResult(False, "Deleted page details persist operation failed", ministryrequestid) + finally: + db.session.close() + + @classmethod + def getdeletedpages(cls, ministryrequestid, docids): + try: + deletepage_schema = DocumentDeletedSchema(many=True) + query = db.session.query(DocumentDeletedPage).filter(DocumentDeletedPage.ministryrequestid == ministryrequestid, DocumentDeletedPage.documentid.in_(docids)).all() + return deletepage_schema.dump(query) + except 
Exception as ex: + logging.error(ex) + finally: + db.session.close() + + +class DocumentDeletedSchema(ma.Schema): + class Meta: + fields = ('id', 'version', 'redactionlayerid', 'ministryrequestid', 'documentid','pagemetadata', 'created_at', 'createdby') \ No newline at end of file diff --git a/api/reviewer_api/models/DocumentMaster.py b/api/reviewer_api/models/DocumentMaster.py index 920e00d38..cca2050ec 100644 --- a/api/reviewer_api/models/DocumentMaster.py +++ b/api/reviewer_api/models/DocumentMaster.py @@ -208,7 +208,7 @@ def getdocumentproperty(cls, ministryrequestid, deleted): documentmasters = [] try: sql = """select dm.documentmasterid, dm.processingparentid, d.documentid, d.version, - dhc.rank1hash, d.filename, d.pagecount, dm.parentid from "DocumentMaster" dm, + dhc.rank1hash, d.filename, d.originalpagecount, d.pagecount, dm.parentid from "DocumentMaster" dm, "Documents" d, "DocumentHashCodes" dhc where dm.ministryrequestid = :ministryrequestid and dm.ministryrequestid = d.foiministryrequestid and dm.documentmasterid = d.documentmasterid @@ -216,7 +216,7 @@ def getdocumentproperty(cls, ministryrequestid, deleted): rs = db.session.execute(text(sql), {'ministryrequestid': ministryrequestid}) for row in rs: if (row["processingparentid"] is not None and row["processingparentid"] not in deleted) or (row["processingparentid"] is None and row["documentmasterid"] not in deleted): - documentmasters.append({"documentmasterid": row["documentmasterid"], "processingparentid": row["processingparentid"], "documentid": row["documentid"], "rank1hash": row["rank1hash"], "filename": row["filename"], "pagecount": row["pagecount"], "parentid": row["parentid"], "version": row["version"]}) + documentmasters.append({"documentmasterid": row["documentmasterid"], "processingparentid": row["processingparentid"], "documentid": row["documentid"], "rank1hash": row["rank1hash"], "filename": row["filename"], "originalpagecount": row["originalpagecount"],"pagecount": row["pagecount"], "parentid": row["parentid"], "version": row["version"]}) except Exception as ex: logging.error(ex) db.session.close() diff --git a/api/reviewer_api/models/DocumentPageflags.py b/api/reviewer_api/models/DocumentPageflags.py index 02b059997..2c70b49ca 100644 --- a/api/reviewer_api/models/DocumentPageflags.py +++ b/api/reviewer_api/models/DocumentPageflags.py @@ -212,7 +212,44 @@ def getpageflag( db.session.close() @classmethod - def getpageflag_by_request(cls, _foiministryrequestid, redactionlayerid, documentids): + def getpageflag_by_request(cls, _foiministryrequestid, redactionlayerid): + pageflags = [] + try: + sql = """select distinct on (dp.documentid) dp.documentid, dp.documentversion, dp.pageflag + from "DocumentPageflags" dp + join "Documents" d on dp.documentid = d.documentid and d.foiministryrequestid = :foiministryrequestid + --join "DocumentMaster" dm on dm.documentmasterid = d.documentmasterid and dm.ministryrequestid = :foiministryrequestid + --left join "DocumentDeleted" dd on dm.filepath ilike dd.filepath || '%' and dd.ministryrequestid = :foiministryrequestid + where dp.foiministryrequestid = :foiministryrequestid --and (dd.deleted is false or dd.deleted is null) + and redactionlayerid in :redactionlayerid + order by dp.documentid, dp.documentversion desc, dp.id desc; + """ + rs = db.session.execute( + text(sql), + { + "foiministryrequestid": _foiministryrequestid, + "redactionlayerid": tuple(redactionlayerid), + }, + ) + + for row in rs: + pageflags.append( + { + "documentid": row["documentid"], + "documentversion": 
row["documentversion"], + "pageflag": row["pageflag"], + } + ) + except Exception as ex: + logging.error(ex) + db.session.close() + raise ex + finally: + db.session.close() + return pageflags + + @classmethod + def getpageflag_by_request_documentids(cls, _foiministryrequestid, redactionlayerid, documentids): pageflags = [] try: sql = """select distinct on (dp.documentid) dp.documentid, dp.documentversion, dp.pageflag diff --git a/api/reviewer_api/models/Documents.py b/api/reviewer_api/models/Documents.py index d5d6c31a3..ddb707961 100644 --- a/api/reviewer_api/models/Documents.py +++ b/api/reviewer_api/models/Documents.py @@ -1,6 +1,6 @@ from .db import db, ma from .default_method_result import DefaultMethodResult -from sqlalchemy import or_, and_ +from sqlalchemy import or_, and_, bindparam, update from sqlalchemy.dialects.postgresql import JSON from datetime import datetime as datetime2 from sqlalchemy.orm import relationship, backref, aliased @@ -31,6 +31,7 @@ class Document(db.Model): updatedby = db.Column(JSON, unique=False, nullable=True) updated_at = db.Column(db.DateTime, nullable=True) statusid = db.Column(db.Integer, db.ForeignKey('DocumentStatus.statusid')) + originalpagecount = db.Column(db.Integer, nullable=True) pagecount = db.Column(db.Integer, nullable=True) incompatible = db.Column(db.Boolean, nullable=True) documentstatus = relationship("DocumentStatus", backref=backref("DocumentStatus"), uselist=False) @@ -146,6 +147,20 @@ def getdocumentidsbyrequest(cls, ministryrequestid): finally: db.session.close() + @classmethod + def getactivedocumentidsbyrequest(cls, ministryrequestid, deletedmasterids): + try: + query = db.session.query(Document.documentid).distinct().filter( + Document.foiministryrequestid == ministryrequestid, + Document.documentmasterid.notin_(deletedmasterids)).all() + return [r.documentid for r in query] + except Exception as ex: + logging.error(ex) + finally: + db.session.close() + return [] + + @classmethod def getdocumentsbyids(cls, idlist): try: @@ -324,6 +339,44 @@ def __preparedocument(cls, document, created_at, updated_at): 'divisions': document.attributes['divisions'], 'pagecount': document.pagecount } + + + @DeprecationWarning + def updatepagecount(cls, ministryrequestid, pagemappings) -> DefaultMethodResult: + try: + db.session.bulk_update_mappings(Document, pagemappings) + db.session.commit() + return DefaultMethodResult(True, "Document pagecount updated", ministryrequestid) + except Exception as ex: + logging.error(ex) + return DefaultMethodResult(False, "ocument pagecount update operation failed", ministryrequestid) + finally: + db.session.close() + + + @classmethod + def getdocumentpagedatabyrequest(cls, ministryrequestid): + docs = {} + try: + sql = """ + select distinct on (docs.documentid) docs.documentid, docs.pagecount + from "Documents" docs + where docs.foiministryrequestid = :ministryrequestid + order by docs.documentid, docs.version desc + """ + rs = db.session.execute( + text(sql), + {"ministryrequestid": ministryrequestid}, + ) + for row in rs: + docs[row['documentid']]=row['pagecount'] + return docs + except Exception as ex: + logging.error(ex) + finally: + db.session.close() + + # subquery to fetch the earliest uploaded, non-deleted duplicates in a request @classmethod diff --git a/api/reviewer_api/models/PageCalculatorJob.py b/api/reviewer_api/models/PageCalculatorJob.py new file mode 100644 index 000000000..3ed802627 --- /dev/null +++ b/api/reviewer_api/models/PageCalculatorJob.py @@ -0,0 +1,51 @@ +from .db import db, ma +from 
diff --git a/api/reviewer_api/models/PageCalculatorJob.py b/api/reviewer_api/models/PageCalculatorJob.py new file mode 100644 index 000000000..3ed802627 --- /dev/null +++ b/api/reviewer_api/models/PageCalculatorJob.py @@ -0,0 +1,51 @@ +from .db import db, ma +from datetime import datetime as datetime2 +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy import func, and_ +from .default_method_result import DefaultMethodResult +import logging + + +class PageCalculatorJob(db.Model): + __tablename__ = "PageCalculatorJob" + # Defining the columns + pagecalculatorjobid = db.Column(db.Integer, primary_key=True, autoincrement=True) + version = db.Column(db.Integer, primary_key=True, nullable=False) + ministryrequestid = db.Column(db.Integer, nullable=False) + inputmessage = db.Column(JSON, nullable=False) + pagecount = db.Column(JSON, nullable=True) + status = db.Column(db.String(120), nullable=False) + message = db.Column(db.Text, nullable=True) + createdat = db.Column(db.DateTime, default=datetime2.now, nullable=False) + createdby = db.Column(db.String(120), nullable=False) + + @classmethod + def insert(cls, row): + try: + db.session.add(row) + db.session.commit() + return DefaultMethodResult( + True, + "PageCalculatorJob recorded for ministryrequestid: {0}".format( + row.ministryrequestid + ), + row.pagecalculatorjobid, + ) + except Exception as ex: + logging.error(ex) + finally: + db.session.close() + +class PageCalculatorJobSchema(ma.Schema): + class Meta: + fields = ( + "pagecalculatorjobid", + "version", + "ministryrequestid", + "inputmessage", + "pagecount", + "status", + "message", + "createdat", + "createdby", + ) diff --git a/api/reviewer_api/resources/document.py b/api/reviewer_api/resources/document.py index 66702db52..cbed4e54c 100644 --- a/api/reviewer_api/resources/document.py +++ b/api/reviewer_api/resources/document.py @@ -22,12 +22,13 @@ from reviewer_api.tracer import Tracer from reviewer_api.utils.util import cors_preflight, allowedorigins, getrequiredmemberships from reviewer_api.exceptions import BusinessException -from reviewer_api.schemas.document import FOIRequestDeleteRecordsSchema, FOIRequestUpdateRecordsSchema +from reviewer_api.schemas.document import FOIRequestDeleteRecordsSchema, FOIRequestUpdateRecordsSchema, DocumentDeletedPage import json import requests import logging from reviewer_api.services.documentservice import documentservice +from reviewer_api.services.docdeletedpageservice import docdeletedpageservice API = Namespace('Document Services', description='Endpoints for deleting and replacing documents') TRACER = Tracer.get_instance() @@ -112,3 +113,39 @@ def get(requestid): except requests.exceptions.HTTPError as err: logging.error("Request Management API returned the following message: {0} - {1}".format(err.response.status_code, err.response.text)) return {'status': False, 'message': err.response.text}, err.response.status_code + + +@cors_preflight('POST,OPTIONS') +@API.route('/document/ministryrequest/<ministryrequestid>/deletedpages') +class DeleteDocumentPage(Resource): + @staticmethod + @TRACER.trace() + @cross_origin(origins=allowedorigins()) + @auth.require + def post(ministryrequestid): + try: + payload = request.get_json() + payload = DocumentDeletedPage().load(payload) + result = docdeletedpageservice().newdeletepages(ministryrequestid, payload, AuthHelper.getuserinfo()) + return {'status': result.success, 'message':result.message,'id':result.identifier}, 200 + except ValueError as error: + return {'status': False, 'message': CUSTOM_KEYERROR_MESSAGE + str(error)}, 400 + except BusinessException as exception: + return {'status': exception.status_code, 'message':exception.message}, 500
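For reference, the POST handler above validates its body with the DocumentDeletedPage schema added later in this diff (a redactionlayer name plus documentpages entries of docid/pages). A hedged client-side sketch; the base URL, request id, layer name, and token are placeholders, not values from this repository:

import requests

payload = {
    "redactionlayer": "redline",  # illustrative layer name
    "documentpages": [{"docid": 41, "pages": [2, 7]}],
}
resp = requests.post(
    "https://reviewer-api.example.com/api/document/ministryrequest/123/deletedpages",
    json=payload,
    headers={"Authorization": "Bearer <token>"},
    timeout=30,
)
print(resp.status_code, resp.json())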
+ + +@cors_preflight('GET,OPTIONS') +@API.route('/document/ministryrequest/<ministryrequestid>/deletedpages') +class GetDocumentDeletedPages(Resource): + @staticmethod + @TRACER.trace() + @cross_origin(origins=allowedorigins()) + @auth.require + def get(ministryrequestid): + try: + result = docdeletedpageservice().getdeletedpages(ministryrequestid) + return json.dumps(result), 200 + except ValueError as error: + return {'status': False, 'message': CUSTOM_KEYERROR_MESSAGE + str(error)}, 400 + except BusinessException as exception: + return {'status': exception.status_code, 'message':exception.message}, 500 diff --git a/api/reviewer_api/resources/documentpageflag.py b/api/reviewer_api/resources/documentpageflag.py index 34caabf1f..3f01c26c5 100644 --- a/api/reviewer_api/resources/documentpageflag.py +++ b/api/reviewer_api/resources/documentpageflag.py @@ -54,26 +54,6 @@ def post(requestid): return {'status': exception.status_code, 'message':exception.message}, 500 -@cors_preflight('GET,OPTIONS') -@API.route('/ministryrequest/<requestid>/document/<documentid>/version/<documentversion>/pageflag/<redactionlayerid>') -class GetDocumentPageflag(Resource): - """Get document page flag list. - """ - @staticmethod - @TRACER.trace() - @cross_origin(origins=allowedorigins()) - @auth.require - @auth.ismemberofgroups(getrequiredmemberships()) - def get(requestid, documentid, documentversion, redactionlayerid): - try: - result = documentpageflagservice().getdocumentpageflags(requestid,redactionlayerid, documentid, documentversion) - return json.dumps(result), 200 - except KeyError as error: - return {'status': False, 'message': CUSTOM_KEYERROR_MESSAGE + str(error)}, 400 - except BusinessException as exception: - return {'status': exception.status_code, 'message':exception.message}, 500 - - @cors_preflight('GET,OPTIONS') @API.route('/ministryrequest/<requestid>/pageflag/<redactionlayer>') class GetDocumentPageflag(Resource): @@ -87,7 +67,7 @@ class GetDocumentPageflag(Resource): def get(requestid, redactionlayer): try: documentids = request.args.getlist('documentids[]') - result = documentpageflagservice().getpageflags(requestid, redactionlayer, documentids) + result = documentpageflagservice().getpageflags_by_requestid_docids(requestid, redactionlayer, documentids) return json.dumps(result), 200 except KeyError as error: return {'status': False, 'message': CUSTOM_KEYERROR_MESSAGE + str(error)}, 400 except BusinessException as exception: return {'status': exception.status_code, 'message':exception.message}, 500
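The surviving pageflag route above still accepts repeated documentids[] query parameters; only the backing service call was renamed. A hedged sketch of a matching client request (URL, ids, and token are placeholders):

import requests

resp = requests.get(
    "https://reviewer-api.example.com/api/ministryrequest/123/pageflag/redline",
    params=[("documentids[]", 41), ("documentids[]", 42)],
    headers={"Authorization": "Bearer <token>"},
    timeout=30,
)
# Deleted pages are now filtered out server-side (see the
# documentpageflagservice changes further down), so the returned
# pageflag lists omit them.
print(resp.json())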
+ """ + @staticmethod + # @TRACER.trace() + @cross_origin(origins=allowedorigins()) + @auth.require + def post(): + try: + requestjson = request.get_json() + result = jobrecordservice().insertpagecalculatorjobstatus(requestjson, AuthHelper.getuserid()) + respcode = 200 if result.success == True else 500 + return {'status': result.success, 'message':result.message,'id':result.identifier}, respcode + except KeyError as error: + return {'status': False, 'message': CUSTOM_KEYERROR_MESSAGE + str(error)}, 400 + except BusinessException as exception: + return {'status': exception.status_code, 'message':exception.message}, 500 \ No newline at end of file diff --git a/api/reviewer_api/schemas/document.py b/api/reviewer_api/schemas/document.py index fd13b55a4..6bd0fa070 100644 --- a/api/reviewer_api/schemas/document.py +++ b/api/reviewer_api/schemas/document.py @@ -31,4 +31,21 @@ class Meta: # pylint: disable=too-few-public-methods unknown = EXCLUDE documentmasterids = fields.List(fields.Integer(),data_key="documentmasterids",allow_none=False) ministryrequestid = fields.Int(data_key="ministryrequestid",allow_none=False) - divisions = fields.Nested(DivisionSchema,many=True,validate=validate.Length(min=1),allow_none=False) \ No newline at end of file + divisions = fields.Nested(DivisionSchema,many=True,validate=validate.Length(min=1),allow_none=False) + + +class DocumentPage(Schema): + class Meta: # pylint: disable=too-few-public-methods + """Exclude unknown fields in the deserialized output.""" + + unknown = EXCLUDE + docid = fields.Int(data_key="docid",allow_none=False) + pages = fields.List(fields.Integer(),data_key="pages",allow_none=False) + +class DocumentDeletedPage(Schema): + class Meta: # pylint: disable=too-few-public-methods + """Exclude unknown fields in the deserialized output.""" + + unknown = EXCLUDE + redactionlayer = fields.String(data_key="redactionlayer",allow_none=False) + documentpages = fields.Nested(DocumentPage,many=True,validate=validate.Length(min=1),allow_none=False) \ No newline at end of file diff --git a/api/reviewer_api/services/docdeletedpageservice.py b/api/reviewer_api/services/docdeletedpageservice.py new file mode 100644 index 000000000..38f1ff8e6 --- /dev/null +++ b/api/reviewer_api/services/docdeletedpageservice.py @@ -0,0 +1,71 @@ +from reviewer_api.models.DocumentDeletedPages import DocumentDeletedPage +from reviewer_api.models.Documents import Document +from reviewer_api.models.DocumentMaster import DocumentMaster +from reviewer_api.models.PageCalculatorJob import PageCalculatorJob +from datetime import datetime +from reviewer_api.services.redactionlayerservice import redactionlayerservice +from reviewer_api.services.external.eventqueueproducerservice import eventqueueproducerservice +from os import getenv + +pagecalculatorstreamkey = getenv("PAGECALCULATOR_STREAM_KEY") + +class docdeletedpageservice: + + def newdeletepages(self, ministryid, docdeletedpage, userinfo): + layerid = self.__getredactionlayerid(docdeletedpage["redactionlayer"]) + docs = Document.getdocumentpagedatabyrequest(ministryid) + docpages = [] + docpagecounts= [] + for entry in docdeletedpage["documentpages"]: + docid = entry["docid"] + docpages.append({ + "redactionlayerid": layerid, + "ministryrequestid": ministryid, + "documentid": docid, + "pagemetadata": entry["pages"], + "createdby": userinfo + }) + docpagecounts.append({ + "documentid": docid, + "foiministryrequestid": ministryid, + "version":1, + "pagecount": docs[docid]-len(entry["pages"]), + "updatedby": userinfo, + "updated_at": 
datetime.now() + }) + result = DocumentDeletedPage().create(ministryid, docpages, docpagecounts) + if result.success: + streamobject = { + 'ministryrequestid': ministryid + } + row = PageCalculatorJob( + version=1, + ministryrequestid=ministryid, + inputmessage=streamobject, + status='pushedtostream', + createdby='deletepages' + ) + job = PageCalculatorJob.insert(row) + streamobject["jobid"] = job.identifier + streamobject["createdby"] = 'delete' + eventqueueproducerservice().add(pagecalculatorstreamkey, streamobject) + return result + + def getdeletedpages(self, ministryid): + deletedmasterids = DocumentMaster.getdeleted(ministryid) + activedocumentids = Document.getactivedocumentidsbyrequest(ministryid, deletedmasterids) + deletedpages = DocumentDeletedPage().getdeletedpages(ministryid, activedocumentids) + documentpages = {} + if deletedpages: + for entry in deletedpages: + if entry["documentid"] not in documentpages: + documentpages[entry["documentid"]] = entry["pagemetadata"] + else: + pages = documentpages[entry["documentid"]]+entry["pagemetadata"] + documentpages[entry["documentid"]] = list(set(pages)) + return documentpages + + + def __getredactionlayerid(self, layername): + return redactionlayerservice().getredactionlayerid(layername.lower()) + diff --git a/api/reviewer_api/services/documentpageflagservice.py b/api/reviewer_api/services/documentpageflagservice.py index 206150707..9345ca8d4 100644 --- a/api/reviewer_api/services/documentpageflagservice.py +++ b/api/reviewer_api/services/documentpageflagservice.py @@ -6,13 +6,14 @@ from reviewer_api.services.redactionlayerservice import redactionlayerservice from reviewer_api.models.default_method_result import DefaultMethodResult from datetime import datetime - +from reviewer_api.services.docdeletedpageservice import docdeletedpageservice class documentpageflagservice: - def getpageflags(self, requestid, redactionlayer, documentids): + def getpageflags_by_requestid_docids(self, requestid, redactionlayer, documentids): layerids = [] layerids.append(redactionlayerservice().getredactionlayerid(redactionlayer)) - return DocumentPageflag.getpageflag_by_request(requestid, layerids, documentids) + pageflags = DocumentPageflag.getpageflag_by_request_documentids(requestid, layerids, documentids) + return self.__removedeletedpages(requestid, pageflags) def getpublicbody(self, requestid, redactionlayer): redactionlayerid = redactionlayerservice().getredactionlayerid(redactionlayer) @@ -31,6 +32,18 @@ def getdocumentpageflags( return pageflag["pageflag"], pageflag["attributes"] return [], None + def __removedeletedpages(self, requestid, pageflags): + docdeletedpages = docdeletedpageservice().getdeletedpages(requestid) + for entry in pageflags: + docid = entry["documentid"] + deletedpages = docdeletedpages[docid] if docid in docdeletedpages else [] + entry["pageflag"] = self.__filterpages(entry["pageflag"], deletedpages) + return pageflags + + def __filterpages(self, pageflag, deletedpages): + return list(filter(lambda pgflag: pgflag['page'] not in deletedpages, pageflag)) + + def getdocumentpageflagsbydocids(self, requestid, redactionlayerid, documentids): layerids = redactionlayerservice().getmappedredactionlayers( {"redactionlayerid": redactionlayerid} @@ -49,6 +62,10 @@ def removebookmark(self, requestid, redactionlayerid, userinfo, documentids): json.dumps(userinfo), redactionlayerid, ) + def __getpageflags(self, requestid, redactionlayer): + layerids = [] + layerids.append(redactionlayerservice().getredactionlayerid(redactionlayer)) + 
return DocumentPageflag.getpageflag_by_request(requestid, layerids) def bulksavedocumentpageflag( self, requestid, documentid, version, pageflags, redactionlayerid, userinfo diff --git a/api/reviewer_api/services/documentservice.py b/api/reviewer_api/services/documentservice.py index 65b72b55c..2a390adbe 100644 --- a/api/reviewer_api/services/documentservice.py +++ b/api/reviewer_api/services/documentservice.py @@ -2,6 +2,7 @@ from reviewer_api.models.DocumentMaster import DocumentMaster from reviewer_api.models.FileConversionJob import FileConversionJob from reviewer_api.models.DeduplicationJob import DeduplicationJob +from reviewer_api.models.PageCalculatorJob import PageCalculatorJob from datetime import datetime as datetime2, timezone from os import path from reviewer_api.models.DocumentDeleted import DocumentDeleted @@ -9,13 +10,14 @@ from reviewer_api.utils.util import pstformat from reviewer_api.models.DocumentAttributes import DocumentAttributes from reviewer_api.services.pdfstitchpackageservice import pdfstitchpackageservice +from reviewer_api.services.external.eventqueueproducerservice import eventqueueproducerservice import requests from reviewer_api.auth import auth, AuthHelper from os import getenv from reviewer_api.utils.enums import StateName requestapiurl = getenv("FOI_REQ_MANAGEMENT_API_URL") - +pagecalculatorstreamkey = getenv("PAGECALCULATOR_STREAM_KEY") class documentservice: def getdedupestatus(self, requestid): @@ -39,7 +41,6 @@ def getdedupestatus(self, requestid): record["attachments"] = self.__getattachments( records, record["documentmasterid"], [] ) - # Duplicate check finalresults = [] ( @@ -75,6 +76,7 @@ def __updateproperties( if record["recordid"] is not None: _att_in_properties = [] ( + record["originalpagecount"], record["pagecount"], record["filename"], record["documentid"], @@ -121,6 +123,7 @@ ) = self.__isduplicate(_att_in_properties, attachment) ( + attachment["originalpagecount"], attachment["pagecount"], attachment["filename"], attachment["documentid"], @@ -142,6 +145,7 @@ def __filterrecords(self, records): return parentrecords, parentswithattachments, attchments def __getpagecountandfilename(self, record, properties): + originalpagecount = 0 pagecount = 0 filename = record["filename"] if "filename" in record else None documentid = None @@ -151,11 +155,12 @@ property["processingparentid"] is None and record["documentmasterid"] == property["documentmasterid"] ): + originalpagecount = property["originalpagecount"] pagecount = property["pagecount"] filename = property["filename"] documentid = property["documentid"] version = property["version"] - return pagecount, filename, documentid, version + return originalpagecount, pagecount, filename, documentid, version def __getduplicatemsgattachment(self, records, attachmentproperties, attachment): _occurances = [] @@ -354,7 +359,7 @@ def __getuploadedrecord(self, records, masterid): def deletedocument(self, payload, userid): """Inserts document into list of deleted documents""" - return DocumentDeleted.create( + result = DocumentDeleted.create( [ DocumentDeleted( filepath=path.splitext(filepath)[0], @@ -366,6 +371,22 @@ for filepath in payload["filepaths"] ] ) + if result.success: + streamobject = { + 'ministryrequestid': payload["ministryrequestid"] + } + row = PageCalculatorJob( + version=1, + ministryrequestid=payload["ministryrequestid"], + inputmessage=streamobject, +
status='pushedtostream', + createdby='delete' + ) + job = PageCalculatorJob.insert(row) + streamobject["jobid"] = job.identifier + streamobject["createdby"] = 'delete' + eventqueueproducerservice().add(pagecalculatorstreamkey, streamobject) + return result def updatedocumentattributes(self, payload, userid): """update document attributes""" @@ -418,7 +439,6 @@ def getdocuments(self, requestid,bcgovcode): for document in self.getdedupestatus(requestid) } attachments = [] - for documentid in documents: _attachments = documents[documentid].pop("attachments", []) for attachment in _attachments: diff --git a/api/reviewer_api/services/external/zipperproducerservice.py b/api/reviewer_api/services/external/eventqueueproducerservice.py similarity index 80% rename from api/reviewer_api/services/external/zipperproducerservice.py rename to api/reviewer_api/services/external/eventqueueproducerservice.py index 11f699911..27624a66a 100644 --- a/api/reviewer_api/services/external/zipperproducerservice.py +++ b/api/reviewer_api/services/external/eventqueueproducerservice.py @@ -5,7 +5,7 @@ from os import getenv -class zipperproducerservice: +class eventqueueproducerservice: """This class is reserved for integration with event queue (currently redis streams).""" host = os.getenv("ZIPPER_REDIS_HOST") @@ -14,15 +14,12 @@ class zipperproducerservice: db = Database(host=host, port=port, db=0, password=password) - def add(self, payload): + def add(self, streamkey, payload): try: - stream = self.db.Stream(self.__streamkey()) + stream = self.db.Stream(streamkey) msgid = stream.add(payload, id="*") return DefaultMethodResult(True, "Added to stream", msgid.decode("utf-8")) except Exception as err: logging.error("Error in contacting Redis Stream") logging.error(err) return DefaultMethodResult(False, err, -1) - - def __streamkey(self): - return getenv("ZIPPER_STREAM_KEY") \ No newline at end of file diff --git a/api/reviewer_api/services/jobrecordservice.py b/api/reviewer_api/services/jobrecordservice.py index ffc6a992c..587c94bc4 100644 --- a/api/reviewer_api/services/jobrecordservice.py +++ b/api/reviewer_api/services/jobrecordservice.py @@ -1,6 +1,7 @@ from reviewer_api.models.FileConversionJob import FileConversionJob from reviewer_api.models.DeduplicationJob import DeduplicationJob from reviewer_api.models.PDFStitchJob import PDFStitchJob +from reviewer_api.models.PageCalculatorJob import PageCalculatorJob from reviewer_api.models.DocumentMaster import DocumentMaster from reviewer_api.models.DocumentAttributes import DocumentAttributes from reviewer_api.services.annotationservice import annotationservice @@ -116,3 +117,15 @@ def recordjobstatus(self, batchinfo, userid): else: jobids[record['s3uripath']] = {'error': 'Invalid file type'} return jobids + + + def insertpagecalculatorjobstatus(self, message, userid): + row = PageCalculatorJob( + version=1, + ministryrequestid=message['ministryrequestid'], + inputmessage=message, + status='pushedtostream', + createdby=userid + ) + job = PageCalculatorJob.insert(row) + return job diff --git a/api/reviewer_api/services/radactionservice.py b/api/reviewer_api/services/radactionservice.py index 36a87d55e..776deede8 100644 --- a/api/reviewer_api/services/radactionservice.py +++ b/api/reviewer_api/services/radactionservice.py @@ -236,4 +236,4 @@ def __preparemessageforjobstatus(self, messageschema): "ministryrequestid": int(messageschema["ministryrequestid"]), "inputfiles": messageschema["attributes"], } - return __message + return __message \ No newline at end of file diff 
--git a/api/sample.env b/api/sample.env index d46e2bd5b..fff2ffe59 100644 --- a/api/sample.env +++ b/api/sample.env @@ -66,4 +66,10 @@ TEST_MINISTRY_USERID=foiedu@idir TEST_MINISTRY_PASSWORD= #Bulk Redaction - Configuration -BATCH_CONFIG={"begin": 2, "size": 100, "limit": 250} \ No newline at end of file +BATCH_CONFIG={"begin": 2, "size": 100, "limit": 250} + + +EVENT_QUEUE_HOST= +EVENT_QUEUE_PASSWORD= +EVENT_QUEUE_PORT= +PAGECALCULATOR_STREAM_KEY= \ No newline at end of file diff --git a/computingservices/DedupeServices/models/pagecalculatorproducermessage.py b/computingservices/DedupeServices/models/pagecalculatorproducermessage.py new file mode 100644 index 000000000..9a9bef0d9 --- /dev/null +++ b/computingservices/DedupeServices/models/pagecalculatorproducermessage.py @@ -0,0 +1,10 @@ +class pagecalculatorproducermessage(object): + def __init__(self,jobid,filename,pagecount,ministryrequestid,documentmasterid,trigger,createdby) -> None: + self.jobid = jobid + self.filename = filename + self.pagecount = pagecount + self.ministryrequestid = ministryrequestid + self.documentmasterid = documentmasterid + self.trigger = trigger + self.createdby = createdby + \ No newline at end of file diff --git a/computingservices/DedupeServices/services/dedupedbservice.py b/computingservices/DedupeServices/services/dedupedbservice.py index 92d68febf..efeff1759 100644 --- a/computingservices/DedupeServices/services/dedupedbservice.py +++ b/computingservices/DedupeServices/services/dedupedbservice.py @@ -1,5 +1,6 @@ from . import getdbconnection from models import dedupeproducermessage +from utils.basicutils import to_json from datetime import datetime import json @@ -11,9 +12,9 @@ def savedocumentdetails(dedupeproducermessage, hashcode, pagecount = 1): _incompatible = True if str(dedupeproducermessage.incompatible).lower() == 'true' else False cursor.execute('INSERT INTO public."Documents" (version, \ - filename, documentmasterid,foiministryrequestid,createdby,created_at,statusid,incompatible,pagecount) VALUES(%s::integer, %s, %s,%s::integer,%s,%s,%s::integer,%s::bool,%s::integer) RETURNING documentid;', + filename, documentmasterid,foiministryrequestid,createdby,created_at,statusid,incompatible, originalpagecount, pagecount) VALUES(%s::integer, %s, %s,%s::integer,%s,%s,%s::integer,%s::bool,%s::integer,%s::integer) RETURNING documentid;', (1, dedupeproducermessage.filename, dedupeproducermessage.outputdocumentmasterid or dedupeproducermessage.documentmasterid, - dedupeproducermessage.ministryrequestid,'{"user":"dedupeservice"}',datetime.now(),1,_incompatible,pagecount)) + dedupeproducermessage.ministryrequestid,'{"user":"dedupeservice"}',datetime.now(),1,_incompatible,pagecount,pagecount)) conn.commit() id_of_new_row = cursor.fetchone() @@ -165,5 +166,27 @@ def isbatchcompleted(batch): conn.close() + +def pagecalculatorjobstart(message): + conn = getdbconnection() + try: + + cursor = conn.cursor() + cursor.execute('''INSERT INTO public."PageCalculatorJob" + (version, ministryrequestid, inputmessage, status, createdby) + VALUES (%s::integer, %s::integer, %s, %s, %s) returning pagecalculatorjobid;''', + (1, message.ministryrequestid, to_json(message), 'pushedtostream', 'dedupeservice')) + pagecalculatorjobid = cursor.fetchone()[0] + conn.commit() + cursor.close() + print("Inserted pagecalculatorjobid:", pagecalculatorjobid) + return pagecalculatorjobid + except(Exception) as error: + print("Exception while executing func pagecalculatorjobstart (p6), Error : {0} ".format(error)) + raise + finally: + if conn is not None: + conn.close() + +
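pagecalculatorjobstart above records a job row before anything is pushed to Redis, so consumers can correlate stream entries with database state. A minimal sketch of that record-then-publish handoff using walrus, the same client the producer service below uses; host, credentials, and the stream key here are placeholders:

from walrus import Database

db = Database(host="localhost", port=6379, db=0, password=None)
stream = db.Stream("PAGECALCULATORSTREAM")  # placeholder stream key

def publish_pagecalculator_event(message_dict, jobid):
    # The jobid from the PageCalculatorJob insert rides along in the
    # stream entry so the consumer can update that row's status later.
    message_dict["jobid"] = jobid
    return stream.add(message_dict, id="*")

# publish_pagecalculator_event({"ministryrequestid": 123, "pagecount": 8}, jobid=1)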
diff --git a/computingservices/DedupeServices/services/dedupeservice.py b/computingservices/DedupeServices/services/dedupeservice.py index d434c737b..0799beea4 100644 --- a/computingservices/DedupeServices/services/dedupeservice.py +++ b/computingservices/DedupeServices/services/dedupeservice.py @@ -1,6 +1,8 @@ from .s3documentservice import gets3documenthashcode -from .dedupedbservice import savedocumentdetails, recordjobstart, recordjobend, updateredactionstatus +from .dedupedbservice import savedocumentdetails, recordjobstart, recordjobend, updateredactionstatus, pagecalculatorjobstart +from .documentspagecalculatorservice import documentspagecalculatorproducerservice +from models.pagecalculatorproducermessage import pagecalculatorproducermessage import traceback @@ -11,6 +13,12 @@ def processmessage(message): savedocumentdetails(message, hashcode, _pagecount) recordjobend(message, False) updateredactionstatus(message) + _incompatible = True if str(message.incompatible).lower() == 'true' else False + if not _incompatible: + pagecalculatormessage = documentspagecalculatorproducerservice().createpagecalculatorproducermessage(message, _pagecount) + pagecalculatorjobid = pagecalculatorjobstart(pagecalculatormessage) + print("Pushed to Page Calculator Stream!!!") + documentspagecalculatorproducerservice().producepagecalculatorevent(pagecalculatormessage, _pagecount, pagecalculatorjobid) except(Exception) as error: print("Exception while processing redis message, func processmessage(p3), Error : {0} ".format(error)) recordjobend(message, True, traceback.format_exc()) \ No newline at end of file diff --git a/computingservices/DedupeServices/services/documentspagecalculatorservice.py b/computingservices/DedupeServices/services/documentspagecalculatorservice.py new file mode 100644 index 000000000..c2fc160a9 --- /dev/null +++ b/computingservices/DedupeServices/services/documentspagecalculatorservice.py @@ -0,0 +1,27 @@ +from utils.foidedupeconfig import pagecalculatorredishost,pagecalculatorredispassword,pagecalculatorredisport,pagecalculatorstreamkey,health_check_interval +from walrus import Database +from models.pagecalculatorproducermessage import pagecalculatorproducermessage +from utils.basicutils import to_json + +class documentspagecalculatorproducerservice: + pagecalculatorredisdb = None + pagecalculatorredisstream = None + def __init__(self) -> None: + self.pagecalculatorredisdb = Database(host=str(pagecalculatorredishost), port=str(pagecalculatorredisport), db=0,password=str(pagecalculatorredispassword), retry_on_timeout=True, health_check_interval=int(health_check_interval), socket_keepalive=True) + self.pagecalculatorredisstream = self.pagecalculatorredisdb.Stream(pagecalculatorstreamkey) + + def producepagecalculatorevent(self, finalmessage, pagecount, jobid): + try: + _pagecalculatorrequest = self.createpagecalculatorproducermessage(finalmessage, pagecount, jobid=jobid) + _pagecalculatorredisstream = self.pagecalculatorredisstream + if _pagecalculatorredisstream is not None: + return _pagecalculatorredisstream.add(_pagecalculatorrequest.__dict__,id="*") + except (Exception) as error: + print(error) + raise error + + def createpagecalculatorproducermessage(self,message, pagecount, jobid = 0): + return pagecalculatorproducermessage(jobid=jobid, filename=message.filename, pagecount=pagecount, + ministryrequestid=message.ministryrequestid, documentmasterid=message.documentmasterid, + trigger=message.trigger,createdby='dedupeservice') + diff --git
a/computingservices/DedupeServices/utils/basicutils.py b/computingservices/DedupeServices/utils/basicutils.py new file mode 100644 index 000000000..be657fedb --- /dev/null +++ b/computingservices/DedupeServices/utils/basicutils.py @@ -0,0 +1,7 @@ +import json +def to_json(obj): + return json.dumps(obj, default=lambda obj: obj.__dict__) + +def add_spacing_around_special_character(special_char, stringvalue): + string= stringvalue.split(special_char) + return f' {special_char} '.join(string) \ No newline at end of file diff --git a/computingservices/DedupeServices/utils/foidedupeconfig.py b/computingservices/DedupeServices/utils/foidedupeconfig.py index b71e6219e..6e648af28 100644 --- a/computingservices/DedupeServices/utils/foidedupeconfig.py +++ b/computingservices/DedupeServices/utils/foidedupeconfig.py @@ -27,6 +27,12 @@ request_management_api = os.getenv("DEDUPE_REQUEST_MANAGEMENT_API") record_formats_path = os.getenv("DEDUPE_RECORD_FORMATS") +pagecalculatorredishost = os.getenv('REDIS_HOST') +pagecalculatorredispassword = os.getenv('REDIS_PASSWORD') +pagecalculatorredisport = os.getenv('REDIS_PORT') +pagecalculatorstreamkey = os.getenv('PAGECALCULATOR_STREAM_KEY') +health_check_interval = os.getenv('HEALTH_CHECK_INTERVAL', 15) + try: response = requests.request( method="GET", diff --git a/computingservices/DocumentServices/services/cdogsapiservice.py b/computingservices/DocumentServices/services/cdogsapiservice.py index 3b1bb4859..ec9d0d4f6 100644 --- a/computingservices/DocumentServices/services/cdogsapiservice.py +++ b/computingservices/DocumentServices/services/cdogsapiservice.py @@ -40,19 +40,15 @@ def upload_template(self, template_path, access_token): "Authorization": f'Bearer {access_token}' } url = f"{cdogs_base_url}/api/v2/template" - if os.path.exists(template_path): - print("Exists!!") template = {'template':('template', open(template_path, 'rb'), "multipart/form-data")} response = self._post_upload_template(headers, url, template) if response.status_code == 200: - print('Returning new hash %s', response.headers['X-Template-Hash']) return response.headers['X-Template-Hash']; response_json = json.loads(response.content) if response.status_code == 405 and response_json['detail'] is not None: match = re.findall(r"Hash '(.*?)'", response_json['detail']); if match: - print('Template already hashed with code %s', match[0]) return match[0] diff --git a/computingservices/DocumentServices/services/dal/documentpageflag.py b/computingservices/DocumentServices/services/dal/documentpageflag.py index 678a37deb..67062bc9a 100644 --- a/computingservices/DocumentServices/services/dal/documentpageflag.py +++ b/computingservices/DocumentServices/services/dal/documentpageflag.py @@ -184,4 +184,33 @@ def getsections_by_documentid_pageno(cls, redactionlayerid, documentid, pagenos) raise finally: if conn is not None: - conn.close() \ No newline at end of file + conn.close() + + @classmethod + def getdeletedpages(cls, ministryrequestid, docids): + conn = getdbconnection() + deldocpages = [] + try: + cursor = conn.cursor() + cursor.execute( + """select id, documentid, pagemetadata + from "DocumentDeletedPages" + where ministryrequestid = %s::integer and documentid in %s + order by documentid, id;""", + (ministryrequestid, tuple(docids)), + ) + + result = cursor.fetchall() + cursor.close() + if result is not None: + for entry in result: + deldocpages.append({"id": entry[0], "documentid": entry[1], "pagemetadata": entry[2]}) + return deldocpages + return None + except Exception as error: + 
logging.error("Error in getting deletedpages for requestid") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() diff --git a/computingservices/DocumentServices/services/dal/documenttemplate.py b/computingservices/DocumentServices/services/dal/documenttemplate.py index ee6cdca51..c3ba41ad3 100644 --- a/computingservices/DocumentServices/services/dal/documenttemplate.py +++ b/computingservices/DocumentServices/services/dal/documenttemplate.py @@ -39,7 +39,6 @@ def updatecdogshashcode(cls, documenttypeid, cdogshashcode): ''' parameters = (cdogshashcode, documenttypeid,) cursor.execute(query, parameters) - print("DB updated") conn.commit() except(Exception) as error: print("Exception while executing func updatecdogshashcode, Error : {0} ".format(error)) diff --git a/computingservices/DocumentServices/services/documentgenerationservice.py b/computingservices/DocumentServices/services/documentgenerationservice.py index 49f091fe2..600bde016 100644 --- a/computingservices/DocumentServices/services/documentgenerationservice.py +++ b/computingservices/DocumentServices/services/documentgenerationservice.py @@ -39,7 +39,6 @@ def generate_pdf(self, data, documenttypename='redline_redaction_summary', templ if templatefromdb is not None and templatefromdb["cdogs_hash_code"] is not None: template_cached = cdogsapiservice().check_template_cached(templatefromdb["cdogs_hash_code"], access_token) templatecdogshashcode = templatefromdb["cdogs_hash_code"] - #print("template_cached:",template_cached) if templatefromdb is None or templatefromdb["cdogs_hash_code"] is None or not template_cached: templatecdogshashcode = cdogsapiservice().upload_template(template_path, access_token) diff --git a/computingservices/DocumentServices/services/dts/redactionsummary.py b/computingservices/DocumentServices/services/dts/redactionsummary.py index 69360b8d8..bae30030f 100644 --- a/computingservices/DocumentServices/services/dts/redactionsummary.py +++ b/computingservices/DocumentServices/services/dts/redactionsummary.py @@ -3,7 +3,7 @@ class redactionsummary(): def prepareredactionsummary(self, message, documentids, pageflags, programareas): - redactionsummary = self.prepare_pkg_redactionsummary(message, documentids, pageflags, programareas) + redactionsummary = self.__packaggesummary(message, documentids, pageflags, programareas) if message.category == "responsepackage": consolidated_redactions = [] for entry in redactionsummary['data']: @@ -17,7 +17,7 @@ def __getrangenumber(self, rangeval): rangestart = str(rangestart).split('(')[0] return int(rangestart) - def prepare_pkg_redactionsummary(self, message, documentids, pageflags, programareas): + def __packaggesummary(self, message, documentids, pageflags, programareas): try: redactionlayerid = message.redactionlayerid summarymsg = message.summarydocuments @@ -30,13 +30,15 @@ def prepare_pkg_redactionsummary(self, message, documentids, pageflags, programa summarydata = [] docpageflags = documentpageflag().get_documentpageflag(message.ministryrequestid, redactionlayerid, ordereddocids) + deletedpages = self.__getdeletedpages(message.ministryrequestid, ordereddocids) skippages= [] pagecount = 0 for docid in ordereddocids: if docid in documentids: + docdeletedpages = deletedpages[docid] if docid in deletedpages else [] docpageflag = docpageflags[docid] for pageflag in _pageflags: - filteredpages = self.__get_pages_by_flagid(docpageflag["pageflag"], pagecount, pageflag["pageflagid"], message.category) + filteredpages = 
self.__get_pages_by_flagid(docpageflag["pageflag"], docdeletedpages, pagecount, pageflag["pageflagid"], message.category) if len(filteredpages) > 0: originalpagenos = [pg['originalpageno'] for pg in filteredpages] docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, docid, originalpagenos) @@ -55,7 +57,19 @@ def prepare_pkg_redactionsummary(self, message, documentids, pageflags, programa summarydata.append(_data) return {"requestnumber": message.requestnumber, "data": summarydata} except (Exception) as error: - print('error occured in redaction summary service: ', error) + print('error occurred in redaction dts service: ', error) + + def __getdeletedpages(self, ministryid, ordereddocids): + deletedpages = documentpageflag().getdeletedpages(ministryid, ordereddocids) + documentpages = {} + if deletedpages: + for entry in deletedpages: + if entry["documentid"] not in documentpages: + documentpages[entry["documentid"]] = entry["pagemetadata"] + else: + pages = documentpages[entry["documentid"]]+entry["pagemetadata"] + documentpages[entry["documentid"]] = list(set(pages)) + return documentpages + def __transformpageflags(self, pageflags): for entry in pageflags: @@ -137,12 +151,12 @@ def __get_sections(self, docpagesections, pageno): sections += [x.strip() for x in dta['section'].split(",")] return list(filter(None, sections)) - def __get_pages_by_flagid(self, _docpageflags, totalpages, flagid, category): + def __get_pages_by_flagid(self, _docpageflags, deletedpages, totalpages, flagid, category): pagenos = [] skippages = self.__get_skippagenos(_docpageflags,category) for x in _docpageflags: - if x["flagid"] == flagid: - pagenos.append({'originalpageno':x["page"]-1, 'stitchedpageno':self.__calcstitchedpageno(x["page"], totalpages, category,skippages)}) + if x["flagid"] == flagid and x["page"] not in deletedpages: + pagenos.append({'originalpageno':x["page"]-1, 'stitchedpageno':self.__calcstitchedpageno(x["page"], totalpages, category, skippages, deletedpages)}) return pagenos def __get_skippagenos(self, _docpageflags, category): @@ -153,13 +167,18 @@ def __get_skippagenos(self, _docpageflags, category): skippages.append(x['page']) return skippages - def __calcstitchedpageno(self, pageno, totalpages, category, skippages): + def __calcstitchedpageno(self, pageno, totalpages, category, skippages, deletedpages): skipcount = 0 - if category == "responsepackage": - for sno in skippages: - if sno < pageno: - skipcount=skipcount+1 + if category == "responsepackage": + skipcount = self.__calculateskipcount(pageno, skippages) + skipcount = self.__calculateskipcount(pageno, deletedpages, skipcount) return (pageno+totalpages)-skipcount + + def __calculateskipcount(self, pageno, ignorepages, skipcount=0): + for dno in ignorepages: + if dno < pageno: + skipcount=skipcount+1 + return skipcount def __calculate_totalpages(self, data):
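The stitched-page arithmetic above (__calcstitchedpageno plus __calculateskipcount) can be restated standalone for clarity, under the same assumptions: responsepackage category and 1-based page numbers.

def calcstitchedpageno(pageno, totalpages, skippages, deletedpages):
    # Every skipped (duplicate) or deleted page that precedes this page
    # pulls its stitched position back by one.
    skipcount = sum(1 for p in skippages if p < pageno)
    skipcount += sum(1 for p in deletedpages if p < pageno)
    return (pageno + totalpages) - skipcount

# Page 5 of a document stitched after 10 earlier pages, with page 2
# skipped and page 3 deleted, lands at stitched page 13.
print(calcstitchedpageno(5, 10, skippages=[2], deletedpages=[3]))  # 13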
diff --git a/computingservices/DocumentServices/services/redactionsummaryservice.py b/computingservices/DocumentServices/services/redactionsummaryservice.py index f68c7b705..f5621e5a0 100644 --- a/computingservices/DocumentServices/services/redactionsummaryservice.py +++ b/computingservices/DocumentServices/services/redactionsummaryservice.py @@ -29,6 +29,7 @@ def processmessage(self,incomingmessage): divisionid = entry.divisionid documentids = entry.documentids formattedsummary = redactionsummary().prepareredactionsummary(message, documentids, pageflags, programareas) + #print("formattedsummary", formattedsummary) template_path='templates/'+documenttypename+'.docx' redaction_summary= documentgenerationservice().generate_pdf(formattedsummary, documenttypename,template_path) messageattributes= message.attributes diff --git a/computingservices/DocumentServices/services/zippingservice.py b/computingservices/DocumentServices/services/zippingservice.py index 2a9489513..d0b152ddd 100644 --- a/computingservices/DocumentServices/services/zippingservice.py +++ b/computingservices/DocumentServices/services/zippingservice.py @@ -21,7 +21,7 @@ def preparemessageforzipperservice(self,summaryfiles, message): msgjson['summarydocuments'] = self.to_json(msgjson['summarydocuments']) return msgjson except (Exception) as error: - print('error occured in redaction summary service: ', error) + print('error occurred in zipping service: ', error) def to_json(self, obj): return json.dumps(obj, default=lambda obj: obj.__dict__) diff --git a/computingservices/PageCountCalculator/.gitignore b/computingservices/PageCountCalculator/.gitignore new file mode 100644 index 000000000..0fd8a721b --- /dev/null +++ b/computingservices/PageCountCalculator/.gitignore @@ -0,0 +1,2 @@ +__pycache__/* +*.pyc \ No newline at end of file diff --git a/computingservices/PageCountCalculator/DockerFile.bcgov b/computingservices/PageCountCalculator/DockerFile.bcgov new file mode 100644 index 000000000..427e6e598 --- /dev/null +++ b/computingservices/PageCountCalculator/DockerFile.bcgov @@ -0,0 +1,18 @@ +FROM artifacts.developer.gov.bc.ca/docker-remote/python:3.10.8-buster + +# Keeps Python from generating .pyc files in the container +ENV PYTHONDONTWRITEBYTECODE=1 + +# Turns off buffering for easier container logging +ENV PYTHONUNBUFFERED=1 + +RUN useradd --create-home --shell /bin/bash app_user +WORKDIR /home/app_user +COPY requirements.txt ./ +RUN apt-get update \ && apt-get -y install libpq-dev gcc \ && pip install psycopg2 +RUN pip install --no-cache-dir -r requirements.txt +USER app_user +COPY . . +ENTRYPOINT ["/bin/sh", "./entrypoint.sh"] diff --git a/computingservices/PageCountCalculator/Dockerfile.local b/computingservices/PageCountCalculator/Dockerfile.local new file mode 100644 index 000000000..179ae3680 --- /dev/null +++ b/computingservices/PageCountCalculator/Dockerfile.local @@ -0,0 +1,18 @@ +FROM python:3.10.8 + +# Keeps Python from generating .pyc files in the container +ENV PYTHONDONTWRITEBYTECODE=1 + +# Turns off buffering for easier container logging +ENV PYTHONUNBUFFERED=1 + +RUN useradd --create-home --shell /bin/bash app_user +WORKDIR /home/app_user +COPY requirements.txt ./ +RUN apt-get update \ && apt-get -y install libpq-dev gcc \ && pip install psycopg2 +RUN pip install --no-cache-dir -r requirements.txt +USER app_user +COPY . .
+ENTRYPOINT ["/bin/sh", "./entrypoint.sh"] diff --git a/computingservices/PageCountCalculator/__init__.py b/computingservices/PageCountCalculator/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/computingservices/PageCountCalculator/__main__.py b/computingservices/PageCountCalculator/__main__.py new file mode 100644 index 000000000..4c02454f8 --- /dev/null +++ b/computingservices/PageCountCalculator/__main__.py @@ -0,0 +1,6 @@ +from rstreamio.reader import pagecountcalculatorstreamreader + + + +if __name__ == '__main__': + pagecountcalculatorstreamreader.app() \ No newline at end of file diff --git a/computingservices/PageCountCalculator/entrypoint.sh b/computingservices/PageCountCalculator/entrypoint.sh new file mode 100644 index 000000000..bfa3d8512 --- /dev/null +++ b/computingservices/PageCountCalculator/entrypoint.sh @@ -0,0 +1,2 @@ +#!/bin/bash +python __main__.py "$@" \ No newline at end of file diff --git a/computingservices/PageCountCalculator/requirements.txt b/computingservices/PageCountCalculator/requirements.txt new file mode 100644 index 000000000..f6020afbb Binary files /dev/null and b/computingservices/PageCountCalculator/requirements.txt differ diff --git a/computingservices/PageCountCalculator/rstreamio/__init__.py b/computingservices/PageCountCalculator/rstreamio/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/computingservices/PageCountCalculator/rstreamio/message/__init__.py b/computingservices/PageCountCalculator/rstreamio/message/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/computingservices/PageCountCalculator/rstreamio/message/schemas/__init__.py b/computingservices/PageCountCalculator/rstreamio/message/schemas/__init__.py new file mode 100644 index 000000000..a444092a5 --- /dev/null +++ b/computingservices/PageCountCalculator/rstreamio/message/schemas/__init__.py @@ -0,0 +1,16 @@ +# Copyright © 2021 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+"""Schema package.""" + + diff --git a/computingservices/PageCountCalculator/rstreamio/message/schemas/baseinfo.py b/computingservices/PageCountCalculator/rstreamio/message/schemas/baseinfo.py new file mode 100644 index 000000000..0aad9ac37 --- /dev/null +++ b/computingservices/PageCountCalculator/rstreamio/message/schemas/baseinfo.py @@ -0,0 +1,9 @@ +import json + +class baseobj(object): + def __init__(self, dict_): + self.__dict__.update(dict_) + + +def dict2obj(d): + return json.loads(json.dumps(d), object_hook=baseobj) \ No newline at end of file diff --git a/computingservices/PageCountCalculator/rstreamio/message/schemas/documentspagecount.py b/computingservices/PageCountCalculator/rstreamio/message/schemas/documentspagecount.py new file mode 100644 index 000000000..3420c8219 --- /dev/null +++ b/computingservices/PageCountCalculator/rstreamio/message/schemas/documentspagecount.py @@ -0,0 +1,25 @@ + +from marshmallow import EXCLUDE, fields, Schema +import json +from rstreamio.message.schemas.baseinfo import baseobj, dict2obj +""" +This class consolidates schemas of PageCount Calculation Service. + +""" +class DocumentsPageCountIncomingSchema(Schema): + + jobid = fields.Int(data_key="jobid",allow_none=False) + filename = fields.Str(data_key="filename",allow_none=True) + pagecount = fields.Int(data_key="pagecount",allow_none=True) + ministryrequestid = fields.Str(data_key="ministryrequestid",allow_none=False) + documentmasterid = fields.Str(data_key="documentmasterid",allow_none=True) + trigger = fields.Str(data_key="trigger",allow_none=True) + createdby = fields.Str(data_key="createdby",allow_none=True) + +def get_in_documents_pagecount_msg(producer_json): + messageobject = DocumentsPageCountIncomingSchema().load(__formatmsg(producer_json)) + return dict2obj(messageobject) + +def __formatmsg(producer_json): + j = json.loads(producer_json) + return j \ No newline at end of file diff --git a/computingservices/PageCountCalculator/rstreamio/reader/pagecountcalculatorstreamreader.py b/computingservices/PageCountCalculator/rstreamio/reader/pagecountcalculatorstreamreader.py new file mode 100644 index 000000000..cb79bb99a --- /dev/null +++ b/computingservices/PageCountCalculator/rstreamio/reader/pagecountcalculatorstreamreader.py @@ -0,0 +1,54 @@ +import json +import typer +import random +import time +import logging +from enum import Enum +from utils import redisstreamdb +from utils.foidocumentserviceconfig import pagecalculator_stream_key +from rstreamio.message.schemas.documentspagecount import get_in_documents_pagecount_msg +from services.pagecountservice.pagecountcalculationservice import pagecountcalculationservice + +LAST_ID_KEY = "{consumer_id}:lastid" +BLOCK_TIME = 5000 +STREAM_KEY = pagecalculator_stream_key + +app = typer.Typer() + +class StartFrom(str, Enum): + beginning = "0" + latest = "$" + +@app.command() +def start(consumer_id: str, start_from: StartFrom = StartFrom.latest): + rdb = redisstreamdb + stream = rdb.Stream(STREAM_KEY) + last_id = rdb.get(LAST_ID_KEY.format(consumer_id=consumer_id)) + if last_id: + print(f"PC Resume from ID: {last_id}") + else: + last_id = start_from.value + print(f"PC Starting from {start_from.name}") + + while True: + messages = stream.read(last_id=last_id, block=BLOCK_TIME) + if messages: + for _messages in messages: + # message_id is the random id created to identify the message + # message is the actual data passed to the stream + message_id, message = _messages + print(f"PC processing {message_id}::{message}") + if message is not None: + 
_message = json.dumps({str(key): str(value) for (key, value) in message.items()}) + _message = _message.replace("b'","'").replace("'",'') + try: + pagecountcalculationservice().processmessage(get_in_documents_pagecount_msg(_message)) + except(Exception) as error: + logging.exception(error) + # simulate processing + time.sleep(random.randint(1, 3)) # TODO: remove + last_id = message_id + rdb.set(LAST_ID_KEY.format(consumer_id=consumer_id), last_id) + print(f"PC finished processing {message_id}") + else: + logging.info(f"PC No new messages after ID: {last_id}") \ No newline at end of file diff --git a/computingservices/PageCountCalculator/sample.env b/computingservices/PageCountCalculator/sample.env new file mode 100644 index 000000000..ec0748ec8 --- /dev/null +++ b/computingservices/PageCountCalculator/sample.env @@ -0,0 +1,22 @@ + +#Properties of Page Calculator Service - Begin +REDIS_HOST= +REDIS_PORT= +REDIS_PASSWORD= + + +DOCUMENTSERVICE_DB_HOST= +DOCUMENTSERVICE_DB_NAME= +DOCUMENTSERVICE_DB_PORT= +DOCUMENTSERVICE_DB_USER= +DOCUMENTSERVICE_DB_PASSWORD= + + +FOI_DB_USER= +FOI_DB_PASSWORD= +FOI_DB_NAME= +FOI_DB_HOST= +FOI_DB_PORT= +PAGECALCULATOR_STREAM_KEY= + +#Properties of Page Calculator Service - End diff --git a/computingservices/PageCountCalculator/services/__init__.py b/computingservices/PageCountCalculator/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/computingservices/PageCountCalculator/services/dal/__init__.py b/computingservices/PageCountCalculator/services/dal/__init__.py new file mode 100644 index 000000000..eeeba930d --- /dev/null +++ b/computingservices/PageCountCalculator/services/dal/__init__.py @@ -0,0 +1 @@ +from utils import getdbconnection \ No newline at end of file diff --git a/computingservices/PageCountCalculator/services/dal/pagecount/documentservice.py b/computingservices/PageCountCalculator/services/dal/pagecount/documentservice.py new file mode 100644 index 000000000..910771216 --- /dev/null +++ b/computingservices/PageCountCalculator/services/dal/pagecount/documentservice.py @@ -0,0 +1,192 @@ +from utils import getdbconnection +import logging +from utils.basicutils import to_json + + +class documentservice: + + @classmethod + def pagecalculatorjobstart(cls, message): + conn = getdbconnection() + try: + + cursor = conn.cursor() + cursor.execute('''INSERT INTO public."PageCalculatorJob" + (pagecalculatorjobid, version, ministryrequestid, inputmessage, status, createdby) + VALUES (%s::integer, %s::integer, %s::integer, %s, %s, %s) on conflict (pagecalculatorjobid, version) do nothing returning pagecalculatorjobid;''', + (message.jobid, 2, message.ministryrequestid, to_json(message), 'started', message.createdby)) + conn.commit() + cursor.close() + except(Exception) as error: + print("Exception while executing func pagecalculatorjobstart (p6), Error : {0} ".format(error)) + raise + finally: + if conn is not None: + conn.close() + + @classmethod + def pagecalculatorjobend(cls, jsonmessage, error, pagecount="", message=""): + conn = getdbconnection() + try: + + cursor = conn.cursor() + cursor.execute('''INSERT INTO public."PageCalculatorJob" + (pagecalculatorjobid, version, ministryrequestid, inputmessage, pagecount, status, createdby, message) + VALUES (%s::integer, %s::integer, %s::integer, %s, %s, %s, %s, %s) on conflict (pagecalculatorjobid, version) do nothing returning pagecalculatorjobid;''', + (jsonmessage.jobid, 3, jsonmessage.ministryrequestid, to_json(jsonmessage), to_json(pagecount), 'error' if error else
'completed', jsonmessage.createdby, message if error else "")) + conn.commit() + cursor.close() + + except(Exception) as error: + print("Exception while executing func recordjobend (p7), Error : {0} ".format(error)) + raise + finally: + if conn is not None: + conn.close() + @classmethod + def getdeleteddocuments(cls, ministryrequestid): + conn = getdbconnection() + deleted = [] + try: + cursor = conn.cursor() + query = ''' + SELECT DISTINCT d.documentmasterid + FROM "DocumentMaster" d + INNER JOIN "DocumentDeleted" dd ON d.filepath LIKE dd.filepath || '%%' + WHERE d.ministryrequestid = dd.ministryrequestid + AND d.ministryrequestid = %s::integer + ''' + parameters = (ministryrequestid,) + cursor.execute(query, parameters) + results = cursor.fetchall() + for row in results: + deleted.append(row[0]) + return deleted + except Exception as error: + logging.error("Error in getdeleteddocuments") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() + + @classmethod + def getdocumentmaster(cls, ministryrequestid, deleteddocumentmasterids): + + if len(deleteddocumentmasterids) == 0: + deleteddocumentmasterids = [0] + conn = getdbconnection() + try: + cursor = conn.cursor() + query = ''' + SELECT dm.recordid, dm.parentid, d.filename as attachmentof, dm.filepath, dm.documentmasterid, da."attributes", + dm.created_at, dm.createdby + FROM "DocumentMaster" dm + JOIN "DocumentAttributes" da ON dm.documentmasterid = da.documentmasterid + LEFT JOIN "DocumentMaster" dm2 ON dm2.processingparentid = dm.parentid + -- replace attachment will create 2 or more rows with the same processing parent id + -- we always take the first one since we only need the filename and the user cannot update filename with replace anyways + AND dm2.createdby = 'conversionservice' + LEFT JOIN "Documents" d ON dm2.documentmasterid = d.documentmasterid + WHERE dm.ministryrequestid = %s::integer + AND da.isactive = true + AND da.attributes->>'incompatible' = 'false' + AND dm.documentmasterid NOT IN %s + ORDER BY da.attributes->>'lastmodified' DESC + ''' + parameters = (ministryrequestid, tuple(deleteddocumentmasterids)) + cursor.execute(query, parameters) + result = cursor.fetchall() + documentmaster = [] + for row in result: + document = {} + document["recordid"] = row[0] + document["parentid"] = row[1] + document["attachmentof"] = row[2] + document["filepath"] = row[3] + document["documentmasterid"] = row[4] + document["attributes"] = row[5] + document["created_at"] = row[6] + document["createdby"] = row[7] + documentmaster.append(document) + return documentmaster + except Exception as error: + logging.error("Error in getdocumentmaster") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() + + @classmethod + def getdocumentproperties(cls, ministryrequestid, deleteddocumentmasterids): + if len(deleteddocumentmasterids) == 0: + deleteddocumentmasterids = [0] + conn = getdbconnection() + try: + cursor = conn.cursor() + query = ''' + SELECT dm.documentmasterid, dm.processingparentid, d.documentid, d.version, + dhc.rank1hash, d.filename, d.pagecount, dm.parentid + FROM "DocumentMaster" dm + INNER JOIN "Documents" d + ON dm.ministryrequestid = d.foiministryrequestid + AND dm.documentmasterid = d.documentmasterid + AND dm.ministryrequestid = %s::integer + AND dm.documentmasterid NOT IN %s + INNER JOIN "DocumentHashCodes" dhc ON d.documentid = dhc.documentid + ORDER BY dm.documentmasterid + ''' + parameters = (ministryrequestid, tuple(deleteddocumentmasterids)) + cursor.execute(query, 
parameters) + result = cursor.fetchall() + properties = [] + for row in result: + property = {} + property["documentmasterid"] = row[0] + property["processingparentid"] = row[1] + property["documentid"] = row[2] + property["version"] = row[3] + property["rank1hash"] = row[4] + property["filename"] = row[5] + property["pagecount"] = row[6] + property["parentid"] = row[7] + properties.append(property) + return properties + except Exception as error: + logging.error("Error in getdocumentproperties") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() + + @classmethod + def getdedupestatus(cls, ministryrequestid): + conn = getdbconnection() + try: + cursor = conn.cursor() + query = ''' + SELECT DISTINCT ON (deduplicationjobid) deduplicationjobid, version, documentmasterid, filename + FROM "DeduplicationJob" fcj WHERE ministryrequestid = %s::integer + ORDER BY deduplicationjobid, "version" DESC + ''' + parameters = (ministryrequestid,) + cursor.execute(query, parameters) + result = cursor.fetchall() + dedupe = [] + for row in result: + document = {} + document["deduplicationjobid"] = row[0] + document["version"] = row[1] + document["documentmasterid"] = row[2] + document["filename"] = row[3] + dedupe.append(document) + return dedupe + except Exception as error: + logging.error("Error in getdedupestatus") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() \ No newline at end of file diff --git a/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py new file mode 100644 index 000000000..a358236b4 --- /dev/null +++ b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py @@ -0,0 +1,48 @@ +from utils import getfoidbconnection +import logging +from datetime import datetime + +class ministryervice: + + @classmethod + def getlatestrecordspagecount(cls, ministryrequestid): + conn = getfoidbconnection() + try: + cursor = conn.cursor() + query = ''' + SELECT recordspagecount + FROM public."FOIMinistryRequests" + WHERE foiministryrequestid = %s::integer AND isactive = true + ORDER BY version DESC LIMIT 1; + ''' + parameters = (ministryrequestid,) + cursor.execute(query, parameters) + recordspagecount = cursor.fetchone()[0] + return recordspagecount + except Exception as error: + logging.error("Error in getlatestrecordspagecount") + logging.error(error) + raise + finally: + if conn is not None: + conn.close() + + @classmethod + def updaterecordspagecount(cls, ministryrequestid, pagecount, userid): + conn = getfoidbconnection() + try: + cursor = conn.cursor() + query = ''' + UPDATE public."FOIMinistryRequests" SET recordspagecount = %s::integer, updated_at = %s, updatedby = %s + WHERE foiministryrequestid = %s::integer AND isactive = true; + ''' + parameters = (pagecount, datetime.now().isoformat(), userid, ministryrequestid,) + cursor.execute(query, parameters) + conn.commit() + cursor.close() + except(Exception) as error: + print("Exception while executing func updaterecordspagecount, Error : {0} ".format(error)) + raise + finally: + if conn is not None: + conn.close() \ No newline at end of file diff --git a/computingservices/PageCountCalculator/services/pagecountservice/pagecountcalculationservice.py b/computingservices/PageCountCalculator/services/pagecountservice/pagecountcalculationservice.py new file mode 100644 index 000000000..ee8c248ae --- /dev/null +++ 
b/computingservices/PageCountCalculator/services/pagecountservice/pagecountcalculationservice.py @@ -0,0 +1,16 @@ + +import traceback +import json +from .pagecountservice import pagecountservice +from services.dal.pagecount.documentservice import documentservice + +class pagecountcalculationservice(): + + def processmessage(self, message): + try: + documentservice().pagecalculatorjobstart(message) + pagecountjson = pagecountservice().calculatepagecount(message) + documentservice().pagecalculatorjobend(message, False, pagecountjson) + except (Exception) as error: + print('error occurred in pagecount calculation service: ', error) + documentservice().pagecalculatorjobend(message, True, pagecount="", message=format(error)) \ No newline at end of file diff --git a/computingservices/PageCountCalculator/services/pagecountservice/pagecountservice.py b/computingservices/PageCountCalculator/services/pagecountservice/pagecountservice.py new file mode 100644 index 000000000..b254b5e26 --- /dev/null +++ b/computingservices/PageCountCalculator/services/pagecountservice/pagecountservice.py @@ -0,0 +1,231 @@ +from services.dal.pagecount.documentservice import documentservice +from services.dal.pagecount.ministryservice import ministryervice +from utils.basicutils import pstformat + +class pagecountservice(): + + def calculatepagecount(self, message): + try: + requestpagecount = ministryervice().getlatestrecordspagecount(message.ministryrequestid) + print(f'requestpagecount == {requestpagecount}') + if requestpagecount in (None, 0) and hasattr(message, "pagecount"): + calculatedpagecount = message.pagecount + else: + calculatedpagecount = self.__calculatepagecount(message) + print(f'calculatedpagecount == {calculatedpagecount}') + if requestpagecount != calculatedpagecount: + ministryervice().updaterecordspagecount(message.ministryrequestid, calculatedpagecount, message.createdby) + return {'prevpagecount': requestpagecount, 'calculatedpagecount': calculatedpagecount} + except (Exception) as error: + print('error occurred in pagecount calculation service: ', error) + + def __calculatepagecount(self, message): + records = self.__getdocumentdetails(message) + print(f'records == {records}') + page_count = 0 + for record in records: + if self.__pagecountcalculationneeded(record): + page_count += record.get("pagecount", 0) + attachments = record.get("attachments", []) + for attachment in attachments: + if not attachment.get("isduplicate", False): + page_count += attachment.get("pagecount", 0) + return page_count + + def __pagecountcalculationneeded(self, record): + if not record.get("isduplicate", False) and not record["attributes"].get("isportfolio", False) and not record['attributes'].get('incompatible', False): + return True + return False + + def __getdocumentdetails(self, message): + deleted = documentservice().getdeleteddocuments(message.ministryrequestid) + dedupes = documentservice().getdedupestatus(message.ministryrequestid) + records = documentservice().getdocumentmaster(message.ministryrequestid, deleted) + properties = documentservice().getdocumentproperties(message.ministryrequestid, deleted) + for record in records: + record["duplicatemasterid"] = record["documentmasterid"] + record["ministryrequestid"] = message.ministryrequestid + record["isattachment"] = True if record["parentid"] is not None else False + record["created_at"] = pstformat(record["created_at"]) + record = self.__updatededupestatus(dedupes, record) + if record["recordid"] is not None: + record["attachments"] =
self.__getattachments(records, record["documentmasterid"], []) + + # Duplicate check + finalresults = [] + ( + parentrecords, + parentswithattachmentsrecords, + attachmentsrecords, + ) = self.__filterrecords(records) + parentproperties = self.__getrecordsproperties(parentrecords, properties) + + for record in records: + if record["recordid"] is not None: + finalresult = self.__updateproperties( + properties, + record, + parentproperties, + parentswithattachmentsrecords, + attachmentsrecords, + ) + finalresults.append(finalresult) + + return finalresults + + def __updatededupestatus(self, dedupes, record): + for dedupe in dedupes: + if record["documentmasterid"] == dedupe["documentmasterid"]: + record["filename"] = dedupe["filename"] + return record + + def __getattachments(self, records, documentmasterid, result=None): + if not result: + result = [] + for entry in records: + if entry["recordid"] is None: + if entry["parentid"] not in [None, ""] and int(entry["parentid"]) == int(documentmasterid): + result.append(entry) + result = self.__getattachments( + records, entry["documentmasterid"], result + ) + return result + + def __filterrecords(self, records): + parentrecords = [] + parentswithattachments = [] + attchments = [] + for record in records: + if record["recordid"] is not None: + parentrecords.append(record) + if "attachments" in record and len(record["attachments"]) > 0: + parentswithattachments.append(record) + if record["recordid"] is None: + attchments.append(record) + return parentrecords, parentswithattachments, attchments + + def __getrecordsproperties(self, records, properties): + filtered = [] + for record in records: + for property in properties: + if property["processingparentid"] == record["documentmasterid"] or ( + property["processingparentid"] is None + and record["documentmasterid"] == property["documentmasterid"] + ): + filtered.append(property) + return filtered + + def __updateproperties( + self, + properties, + record, + parentproperties, + parentswithattachmentsrecords, + attachmentsrecords, + ): + if record["recordid"] is not None: + _att_in_properties = [] + (record["pagecount"], record["filename"]) = self.__getpagecountandfilename(record, parentproperties) + record["isduplicate"], record["duplicatemasterid"] = self.__isduplicate(parentproperties, record) + if "attachments" in record and len(record["attachments"]) > 0: + if record["isduplicate"] == True: + duplicatemaster_attachments = self.__getduplicatemasterattachments(parentswithattachmentsrecords, record["duplicatemasterid"]) + if duplicatemaster_attachments is None: + _att_in_properties = self.__getattachmentproperties(record["attachments"], properties) + else: + _att_in_properties = self.__getattachmentproperties(duplicatemaster_attachments + record["attachments"],properties) + elif len(record["attachments"]) > 0: + _att_in_properties = self.__getattachmentproperties(record["attachments"], properties) + for attachment in record["attachments"]: + if len(_att_in_properties) > 1: + if attachment["filepath"].endswith(".msg"): + attachment["isduplicate"], attachment["duplicatemasterid"] = self.__getduplicatemsgattachment(attachmentsrecords, _att_in_properties, attachment) + else: + attachment["isduplicate"], attachment["duplicatemasterid"] = self.__isduplicate(_att_in_properties, attachment) + else: + attachment["isduplicate"] = False + attachment["duplicatemasterid"] = attachment["documentmasterid"] + (attachment["pagecount"], attachment["filename"]) = self.__getpagecountandfilename(attachment, 
_att_in_properties) + return record + + def __getpagecountandfilename(self, record, properties): + pagecount = 0 + filename = record["filename"] if "filename" in record else None + for property in properties: + if record["documentmasterid"] == property["processingparentid"] or ( + property["processingparentid"] is None + and record["documentmasterid"] == property["documentmasterid"] + ): + pagecount = property["pagecount"] + filename = property["filename"] + return pagecount, filename + + def __getduplicatemasterattachments(self, records, duplicatemasterid): + return self.__getrecordproperty(records, duplicatemasterid, "attachments") + + def __getrecordproperty(self, records, documentmasterid, property): + for x in records: + if x["documentmasterid"] == documentmasterid: + return x[property] + return None + + def __getattachmentproperties(self, attachments, properties): + filtered = [] + for attachment in attachments: + for property in properties: + if property["processingparentid"] == attachment["documentmasterid"] or ( + property["processingparentid"] is None + and attachment["documentmasterid"] == property["documentmasterid"] + ): + filtered.append(property) + return filtered + + def __getduplicatemsgattachment(self, records, attachmentproperties, attachment): + _occurances = [] + for entry in attachmentproperties: + if entry["filename"] == attachment["filename"]: + _lhsattribute = self.__getrecordproperty( + records, entry["processingparentid"], "attributes" + ) + _rhsattribute = self.__getrecordproperty( + records, attachment["documentmasterid"], "attributes" + ) + if ( + _lhsattribute["filesize"] == _rhsattribute["filesize"] + and _lhsattribute["lastmodified"] == _rhsattribute["lastmodified"] + ): + _occurances.append(entry) + if len(_occurances) > 1: + filtered = [x["processingparentid"] for x in _occurances] + _firstupload = min(filtered) + if _firstupload != attachment["documentmasterid"]: + attachment["isduplicate"] = True + attachment["duplicatemasterid"] = _firstupload + return attachment["isduplicate"], attachment["duplicatemasterid"] + return False, attachment["documentmasterid"] + + def __isduplicate(self, properties, record): + matchedhash = None + isduplicate = False + duplicatemasterid = record["documentmasterid"] + for property in properties: + if property["processingparentid"] == record["documentmasterid"] or ( + property["processingparentid"] is None + and record["documentmasterid"] == property["documentmasterid"] + ): + matchedhash = property["rank1hash"] + filtered = [] + for x in properties: + if x["rank1hash"] == matchedhash: + value = ( + x["processingparentid"] + if x["processingparentid"] is not None + else x["documentmasterid"] + ) + filtered.append(value) + if len(filtered) > 1 and filtered not in (None, []): + originalid = min(filtered) + if originalid != record["documentmasterid"]: + isduplicate = True + duplicatemasterid = originalid + return isduplicate, duplicatemasterid \ No newline at end of file diff --git a/computingservices/PageCountCalculator/unittests/__init__.py b/computingservices/PageCountCalculator/unittests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/computingservices/PageCountCalculator/unittests/files/sample.pdf b/computingservices/PageCountCalculator/unittests/files/sample.pdf new file mode 100644 index 000000000..af2949c43 Binary files /dev/null and b/computingservices/PageCountCalculator/unittests/files/sample.pdf differ diff --git a/computingservices/PageCountCalculator/unittests/producer.py 
b/computingservices/PageCountCalculator/unittests/producer.py new file mode 100644 index 000000000..82e2e18e7 --- /dev/null +++ b/computingservices/PageCountCalculator/unittests/producer.py @@ -0,0 +1,25 @@ +import sys +import random +from walrus import Database + + +def main(stream_key): + rdb = Database(host='192.168.4.26', port=7379) + + stream = rdb.Stream(stream_key) + + msg_id = stream.add( + + # {'s3filepath': 'https://citz-foi-prod.objectstore.gov.bc.ca/edu-dev-e/EDU-2024-16010730/a4e28721-adcb-4119-839e-4149b4b3421f.pdf', 'filename': 'Policy Summary.pdf', 'ministryrequestid': '22', 'documentmasterid': '380', 'trigger': 'recordupload'} + # {'s3filepath': 'https://citz-foi-prod.objectstore.gov.bc.ca/edu-dev-e/EDU-2024-16010636/ee909132-8a96-451d-a681-c230a4d5a278.pdf', 'filename': 'Policy Summary.pdf', 'hashcode': '2f472e8be54a4cbdcb2437a96e2fdbc64429708d', 'pagecount': '2', 'ministryrequestid': '21', 'documentmasterid': '387', 'trigger': 'recordupload'} + {'filename': 'Policy Summary - Copy.pdf', 'pagecount': '2', 'ministryrequestid': '21', 'documentmasterid': '388', 'trigger': 'recordupload'} + , + id="*" + ) + print(f"message {msg_id} sent") + + +if __name__ == "__main__": + stream_key = "CALCULATE-PAGE-COUNT" + #sensor = sys.argv[1] + main(stream_key) diff --git a/computingservices/PageCountCalculator/utils/__init__.py b/computingservices/PageCountCalculator/utils/__init__.py new file mode 100644 index 000000000..59dc1f2f3 --- /dev/null +++ b/computingservices/PageCountCalculator/utils/__init__.py @@ -0,0 +1,3 @@ +from .foiredisstreamdb import redisstreamdb +from .foidocumentserviceconfig import * +from .dbconnection import getdbconnection, getfoidbconnection diff --git a/computingservices/PageCountCalculator/utils/basicutils.py b/computingservices/PageCountCalculator/utils/basicutils.py new file mode 100644 index 000000000..88ad858d8 --- /dev/null +++ b/computingservices/PageCountCalculator/utils/basicutils.py @@ -0,0 +1,14 @@ +import maya +import json + +def to_json(obj): + return json.dumps(obj, default=lambda obj: obj.__dict__) + +def pstformat(dt): + if dt is not None: + tolocaltime = maya.MayaDT.from_datetime(dt).datetime( + to_timezone="America/Vancouver", naive=False + ) + return tolocaltime.isoformat() + else: + return "" \ No newline at end of file diff --git a/computingservices/PageCountCalculator/utils/dbconnection.py b/computingservices/PageCountCalculator/utils/dbconnection.py new file mode 100644 index 000000000..d98365a31 --- /dev/null +++ b/computingservices/PageCountCalculator/utils/dbconnection.py @@ -0,0 +1,18 @@ +import psycopg2 +from . 
import docservice_db_user,docservice_db_port,docservice_db_host,docservice_db_name,docservice_db_password,foi_db_user,foi_db_port,foi_db_host,foi_db_name,foi_db_password + +def getdbconnection(): + conn = psycopg2.connect( + host=docservice_db_host, + database=docservice_db_name, + user=docservice_db_user, + password=docservice_db_password,port=docservice_db_port) + return conn + +def getfoidbconnection(): + conn = psycopg2.connect( + host=foi_db_host, + database=foi_db_name, + user=foi_db_user, + password=foi_db_password,port=foi_db_port) + return conn \ No newline at end of file diff --git a/computingservices/PageCountCalculator/utils/foidocumentserviceconfig.py b/computingservices/PageCountCalculator/utils/foidocumentserviceconfig.py new file mode 100644 index 000000000..44d37a213 --- /dev/null +++ b/computingservices/PageCountCalculator/utils/foidocumentserviceconfig.py @@ -0,0 +1,25 @@ +import os +import logging +import requests + +from dotenv import load_dotenv + +load_dotenv() + + +redishost = os.getenv("REDIS_HOST") +redisport = os.getenv("REDIS_PORT") +redispassword = os.getenv("REDIS_PASSWORD") +pagecalculator_stream_key = os.getenv("PAGECALCULATOR_STREAM_KEY") + +docservice_db_host = os.getenv("DOCUMENTSERVICE_DB_HOST") +docservice_db_name = os.getenv("DOCUMENTSERVICE_DB_NAME") +docservice_db_port = os.getenv("DOCUMENTSERVICE_DB_PORT") +docservice_db_user = os.getenv("DOCUMENTSERVICE_DB_USER") +docservice_db_password = os.getenv("DOCUMENTSERVICE_DB_PASSWORD") + +foi_db_host = os.getenv("FOI_DB_HOST") +foi_db_name = os.getenv("FOI_DB_NAME") +foi_db_port = os.getenv("FOI_DB_PORT") +foi_db_user = os.getenv("FOI_DB_USER") +foi_db_password = os.getenv("FOI_DB_PASSWORD") \ No newline at end of file diff --git a/computingservices/PageCountCalculator/utils/foiredisstreamdb.py b/computingservices/PageCountCalculator/utils/foiredisstreamdb.py new file mode 100644 index 000000000..09af2d8ee --- /dev/null +++ b/computingservices/PageCountCalculator/utils/foiredisstreamdb.py @@ -0,0 +1,5 @@ +from walrus import Database +from .foidocumentserviceconfig import redishost, redisport, redispassword + + +redisstreamdb = Database(host=str(redishost), port=str(redisport), db=0,password=str(redispassword)) \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index e5324a0be..780be4c45 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: dockerfile: Dockerfile.local args: - NODE_ENV=${NODE_ENV:-development} - - GENERATE_SOURCEMAP=false + - GENERATE_SOURCEMAP=${GENERATE_SOURCEMAP:-false} - REACT_APP_KEYCLOAK_CLIENT=${KEYCLOAK_WEB_CLIENTID:-foi-document-redaction} - REACT_APP_KEYCLOAK_URL_REALM=${KEYCLOAK_URL_REALM:-5k8dbl4h} - REACT_APP_KEYCLOAK_URL=${KEYCLOAK_URL} @@ -80,6 +80,7 @@ services: - DOCUMENTSERVICE_STREAM_KEY=${DOCUMENTSERVICE_STREAM_KEY} - BATCH_CONFIG=${BATCH_CONFIG} - REDLINE_SINGLE_PKG_MINISTRIES=${REDLINE_SINGLE_PKG_MINISTRIES} + - PAGECALCULATOR_STREAM_KEY=${PAGECALCULATOR_STREAM_KEY} foi-docreviewer-db: image: postgres @@ -157,6 +158,8 @@ services: - DEDUPE_REQUEST_MANAGEMENT_API=${DEDUPE_REQUEST_MANAGEMENT_API} - DEDUPE_RECORD_FORMATS=${DEDUPE_RECORD_FORMATS} - NOTIFICATION_STREAM_KEY=${NOTIFICATION_STREAM_KEY} + - PAGECALCULATOR_STREAM_KEY=${PAGECALCULATOR_STREAM_KEY} + - HEALTH_CHECK_INTERVAL=${HEALTH_CHECK_INTERVAL} foi-docreviewer-zippingservice: container_name: foi-docreviewer-zippingservice @@ -236,7 +239,7 @@ services: - ZIPPER_REDIS_PASSWORD=${DEDUPE_REDIS_PASSWORD} - ZIPPER_REDIS_PORT=${DEDUPE_REDIS_PORT} - 
ZIPPER_STREAM_KEY=${ZIPPER_STREAM_KEY} - + foi-docreviewer-documentservice: container_name: foi-docreviewer-documentservice depends_on: @@ -280,8 +283,36 @@ services: - FOI_DB_USER=${FOI_DB_USER} - FOI_DB_PASSWORD=${FOI_DB_PASSWORD} - - + foi-docreviewer-pagecountcalculatorservice: + container_name: foi-docreviewer-pagecountcalculator + depends_on: + - foi-docreviewer-db + - foi-docreviewer-redis + build: + context: ./computingservices/PageCountCalculator + dockerfile: Dockerfile.local + image: docreviewerpagecountcalculatorimage + stdin_open: true + tty: true + networks: + services-network: + aliases: + - docreviewerpagecountcalculator + environment: + - REDIS_HOST=${REDIS_HOST} + - REDIS_PASSWORD=${REDIS_PASSWORD} + - REDIS_PORT=${REDIS_PORT} + - DOCUMENTSERVICE_DB_HOST=${DOCUMENTSERVICE_DB_HOST} + - DOCUMENTSERVICE_DB_NAME=${DOCUMENTSERVICE_DB_NAME} + - DOCUMENTSERVICE_DB_PORT=${DOCUMENTSERVICE_DB_PORT} + - DOCUMENTSERVICE_DB_USER=${DOCUMENTSERVICE_DB_USER} + - DOCUMENTSERVICE_DB_PASSWORD=${DOCUMENTSERVICE_DB_PASSWORD} + - PAGECALCULATOR_STREAM_KEY=${PAGECALCULATOR_STREAM_KEY} + - FOI_DB_HOST=${FOI_DB_HOST} + - FOI_DB_NAME=${FOI_DB_NAME} + - FOI_DB_PORT=${FOI_DB_PORT} + - FOI_DB_USER=${FOI_DB_USER} + - FOI_DB_PASSWORD=${FOI_DB_PASSWORD} volumes: dbdata: networks: diff --git a/openshift/templates/pagecountcalculator/pagecountcalculator-build-main.yaml b/openshift/templates/pagecountcalculator/pagecountcalculator-build-main.yaml new file mode 100644 index 000000000..9af3c1253 --- /dev/null +++ b/openshift/templates/pagecountcalculator/pagecountcalculator-build-main.yaml @@ -0,0 +1,61 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: "${APP_NAME}-build-template" + creationTimestamp: +objects: +- kind: ImageStream + apiVersion: v1 + metadata: + name: "${APP_NAME}" +- kind: BuildConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}-build" + labels: + app: "${APP_NAME}-build" + spec: + runPolicy: Serial + source: + type: Git + git: + uri: "${GIT_REPO_URL}" + ref: "${GIT_REF}" + contextDir: "${SOURCE_CONTEXT_DIR}" + strategy: + type: Docker + dockerStrategy: + dockerfilePath: "${DOCKER_FILE_PATH}" + pullSecret: + name: artifacts-pull-default-jmhvkc + output: + to: + kind: ImageStreamTag + name: "${APP_NAME}:latest" +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the resources defined in this template. + required: true + value: reviewer-pagecountcalculator +- name: GIT_REPO_URL + displayName: Git Repo URL + description: The URL to your GIT repo. + required: true + value: https://github.com/bcgov/foi-docreviewer +- name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + required: true + value: main +- name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. + required: false + value: computingservices/PageCountCalculator +- name: DOCKER_FILE_PATH + displayName: Docker File Path + description: The path to the docker file defining the build. 
+ required: false + value: "Dockerfile.local" \ No newline at end of file diff --git a/openshift/templates/pagecountcalculator/pagecountcalculator-deploy.yaml b/openshift/templates/pagecountcalculator/pagecountcalculator-deploy.yaml new file mode 100644 index 000000000..63c3f5628 --- /dev/null +++ b/openshift/templates/pagecountcalculator/pagecountcalculator-deploy.yaml @@ -0,0 +1,181 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + annotations: + description: Deployment template for a pagecountcalculator service. + tags: "${APP_NAME}" + name: "${APP_NAME}-deploy" +objects: +- kind: DeploymentConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}" + labels: + app: "${APP_NAME}" + app-group: "${APP_GROUP}" + template: "${APP_NAME}-deploy" + spec: + strategy: + type: Rolling + rollingParams: + updatePeriodSeconds: 1 + intervalSeconds: 1 + timeoutSeconds: 600 + maxUnavailable: 25% + maxSurge: 25% + triggers: + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - "${APP_NAME}" + from: + kind: ImageStreamTag + namespace: "${IMAGE_NAMESPACE}" + name: "${IMAGE_NAME}:${TAG_NAME}" + - type: ConfigChange + replicas: 1 + test: false + selector: + app: "${APP_NAME}" + deploymentconfig: "${APP_NAME}" + template: + metadata: + labels: + app: "${APP_NAME}" + app-group: "${APP_GROUP}" + deploymentconfig: "${APP_NAME}" + template: "${APP_NAME}-deploy" + spec: + containers: + - name: "${APP_NAME}" + image: "${APP_NAME}" + imagePullPolicy: Always + env: + - name: REDIS_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: REDIS_HOST + - name: REDIS_PASSWORD + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: REDIS_PASSWORD + - name: REDIS_PORT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: REDIS_PORT + - name: PAGECALCULATOR_STREAM_KEY + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: PAGECALCULATOR_STREAM_KEY + - name: DOCUMENTSERVICE_DB_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: DOCUMENTSERVICE_DB_HOST + - name: DOCUMENTSERVICE_DB_NAME + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: DOCUMENTSERVICE_DB_NAME + - name: DOCUMENTSERVICE_DB_PORT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: DOCUMENTSERVICE_DB_PORT + - name: DOCUMENTSERVICE_DB_USER + valueFrom: + secretKeyRef: + name: "${DB_SECRETS}" + key: app-db-username + - name: DOCUMENTSERVICE_DB_PASSWORD + valueFrom: + secretKeyRef: + name: "${DB_SECRETS}" + key: app-db-password + - name: FOI_DB_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_HOST + - name: FOI_DB_NAME + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_NAME + - name: FOI_DB_PORT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_PORT + - name: FOI_DB_USER + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_USER + - name: FOI_DB_PASSWORD + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_PASSWORD + resources: + requests: + cpu: "50m" + memory: "250Mi" + limits: + cpu: "150m" + memory: "500Mi" + terminationMessagePath: "/dev/termination-log" + terminationMessagePolicy: File + imagePullPolicy: Always + restartPolicy: Always + terminationGracePeriodSeconds: 30 + dnsPolicy: ClusterFirst + securityContext: {} + schedulerName: default-scheduler + # status: + # loadBalancer: {} +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the + server instance. 
+ required: true + value: reviewer-pagecountcalculator +- name: APP_GROUP + displayName: App Group + description: The name assigned to all of the deployments in this project. + required: true + value: foi-docreviewer +- name: IMAGE_NAMESPACE + displayName: Image Namespace + required: true + description: The namespace of the OpenShift project containing the imagestream for + the application. + value: d106d6-tools +- name: IMAGE_NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the + server instance. + required: true + value: reviewer-pagecountcalculator +- name: TAG_NAME + displayName: Environment TAG name + description: The TAG name for this environment, e.g., dev, test, prod + required: true + value: dev +- name: DB_SECRETS + displayName: Patroni DB Secrets + description: Name of secrets for all db values + required: true + value: patroni-docreviewer +- name: SECRETS + displayName: PageCountCalculator Secrets + description: Name of secrets for all pagecountcalculator values + required: true + value: pagecountcalculator-secret diff --git a/openshift/templates/pagecountcalculator/pagecountcalculator_secrets.yaml b/openshift/templates/pagecountcalculator/pagecountcalculator_secrets.yaml new file mode 100644 index 000000000..ad602aecb --- /dev/null +++ b/openshift/templates/pagecountcalculator/pagecountcalculator_secrets.yaml @@ -0,0 +1,69 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + name: ${NAME} + labels: + app: ${NAME} + name: ${NAME} +objects: + - apiVersion: v1 + kind: Secret + metadata: + name: ${NAME} + stringData: + REDIS_HOST: "${REDIS_HOST}" + REDIS_PORT: "${REDIS_PORT}" + REDIS_PASSWORD: '${REDIS_PASSWORD}' + DOCUMENTSERVICE_DB_HOST: '${DOCUMENTSERVICE_DB_HOST}' + DOCUMENTSERVICE_DB_NAME: '${DOCUMENTSERVICE_DB_NAME}' + DOCUMENTSERVICE_DB_PORT: '${DOCUMENTSERVICE_DB_PORT}' + PAGECALCULATOR_STREAM_KEY: '${PAGECALCULATOR_STREAM_KEY}' + FOI_DB_HOST: '${FOI_DB_HOST}' + FOI_DB_NAME: '${FOI_DB_NAME}' + FOI_DB_PORT: '${FOI_DB_PORT}' + FOI_DB_USER: '${FOI_DB_USER}' + FOI_DB_PASSWORD: '${FOI_DB_PASSWORD}' + + type: Opaque + +parameters: + - name: NAME + description: The name for all created objects.
+ required: true + value: pagecountcalculator-secret + - name: REDIS_HOST + description: REDIS_HOST + required: true + - name: REDIS_PORT + description: REDIS_PORT + required: true + - name: REDIS_PASSWORD + description: REDIS_PASSWORD + required: true + - name: DOCUMENTSERVICE_DB_HOST + description: DOCUMENTSERVICE_DB_HOST + required: true + - name: DOCUMENTSERVICE_DB_NAME + description: DOCUMENTSERVICE_DB_NAME + required: true + - name: DOCUMENTSERVICE_DB_PORT + description: DOCUMENTSERVICE_DB_PORT + required: true + - name: PAGECALCULATOR_STREAM_KEY + description: PAGECALCULATOR_STREAM_KEY + required: true + - name: FOI_DB_HOST + description: FOI_DB_HOST + required: true + - name: FOI_DB_NAME + description: FOI_DB_NAME + required: true + - name: FOI_DB_PORT + description: FOI_DB_PORT + required: true + - name: FOI_DB_USER + description: FOI_DB_USER + required: true + - name: FOI_DB_PASSWORD + description: FOI_DB_PASSWORD + required: true \ No newline at end of file diff --git a/openshift/templates/pagecountcalculator/pagecountcalculator_secrets_param.yaml b/openshift/templates/pagecountcalculator/pagecountcalculator_secrets_param.yaml new file mode 100644 index 000000000..11d15d327 --- /dev/null +++ b/openshift/templates/pagecountcalculator/pagecountcalculator_secrets_param.yaml @@ -0,0 +1,12 @@ +REDIS_HOST= +REDIS_PASSWORD= +REDIS_PORT= +DOCUMENTSERVICE_DB_HOST= +DOCUMENTSERVICE_DB_NAME= +DOCUMENTSERVICE_DB_PORT= +PAGECALCULATOR_STREAM_KEY=CALCULATE-PAGE-COUNT +FOI_DB_HOST= +FOI_DB_NAME= +FOI_DB_PORT= +FOI_DB_USER= +FOI_DB_PASSWORD= \ No newline at end of file diff --git a/sample.env b/sample.env index 658eb4fb1..73cb8b396 100644 --- a/sample.env +++ b/sample.env @@ -115,4 +115,11 @@ FILE_CONVERSION_FAILTUREATTEMPT=3 FILE_CONVERSION_WAITTIME=2000 #Provide the values in upper case with comma seperation -REDLINE_SINGLE_PKG_MINISTRIES=VALUE1,VALUE2 \ No newline at end of file +REDLINE_SINGLE_PKG_MINISTRIES=VALUE1,VALUE2 + +PAGECALCULATOR_STREAM_KEY="CALCULATE-PAGE-COUNT" +DOCUMENTSERVICE_DB_HOST= +DOCUMENTSERVICE_DB_NAME= +DOCUMENTSERVICE_DB_PORT= +DOCUMENTSERVICE_DB_USER= +DOCUMENTSERVICE_DB_PASSWORD= \ No newline at end of file diff --git a/web/src/actions/actionConstants.ts b/web/src/actions/actionConstants.ts index e41d37d3d..ea90be68f 100644 --- a/web/src/actions/actionConstants.ts +++ b/web/src/actions/actionConstants.ts @@ -18,7 +18,8 @@ const ACTION_CONSTANTS = { SET_REDACTION_LAYERS: "SET_REDACTION_LAYERS", SET_CURRENT_LAYER: "SET_CURRENT_LAYER", INC_REDACTION_LAYER: "INC_REDACTION_LAYER", - SET_REQUEST_NUMBER:"SET_REQUEST_NUMBER" + SET_REQUEST_NUMBER:"SET_REQUEST_NUMBER", + SET_DELETED_PAGES: "SET_DELETED_PAGES" }; export default ACTION_CONSTANTS; diff --git a/web/src/actions/documentActions.ts b/web/src/actions/documentActions.ts index a0ab1446a..bb22fd8cb 100644 --- a/web/src/actions/documentActions.ts +++ b/web/src/actions/documentActions.ts @@ -84,4 +84,11 @@ export const incrementLayerCount = (data: any) => (dispatch:any) =>{ type:ACTION_CONSTANTS.INC_REDACTION_LAYER, payload:data }) +} + +export const setDeletedPages = (data: any) => (dispatch:any) =>{ + dispatch({ + type:ACTION_CONSTANTS.SET_DELETED_PAGES, + payload:data + }) } \ No newline at end of file diff --git a/web/src/apiManager/endpoints/index.tsx b/web/src/apiManager/endpoints/index.tsx index 2c6d962df..e856dc89e 100644 --- a/web/src/apiManager/endpoints/index.tsx +++ b/web/src/apiManager/endpoints/index.tsx @@ -16,5 +16,6 @@ const API = { 
DOCREVIEWER_REDLINE:`${DOCREVIEWER_BASE_API_URL}/api/triggerdownloadredline`, DOCREVIEWER_FINALPACKAGE:`${DOCREVIEWER_BASE_API_URL}/api/triggerdownloadfinalpackage`, DOCREVIEWER_LICENSE:`${DOCREVIEWER_BASE_API_URL}/api/foiflow/webviewerlicense`, + DOCREVIEWER_DOCUMENT_PAGE_DELETE:`${DOCREVIEWER_BASE_API_URL}/api/document/ministryrequest/<ministryrequestid>/deletedpages`, }; export default API; diff --git a/web/src/apiManager/services/docReviewerService.tsx b/web/src/apiManager/services/docReviewerService.tsx index efce67d7f..90758eb1e 100644 --- a/web/src/apiManager/services/docReviewerService.tsx +++ b/web/src/apiManager/services/docReviewerService.tsx @@ -3,7 +3,7 @@ import { httpGETRequest, httpPOSTRequest } from "../httpRequestHandler"; import API from "../endpoints"; import UserService from "../../services/UserService"; import { setRedactionInfo, setIsPageLeftOff, setSections, setPageFlags, - setDocumentList, setRequestStatus, setRedactionLayers, incrementLayerCount, setRequestNumber, setRequestInfo + setDocumentList, setRequestStatus, setRedactionLayers, incrementLayerCount, setRequestNumber, setRequestInfo, setDeletedPages } from "../../actions/documentActions"; import { store } from "../../services/StoreService"; import { number } from "yargs"; @@ -431,4 +431,56 @@ export const fetchPDFTronLicense = ( return ""; }); return response; +}; + +export const deleteDocumentPages = ( + requestid: string, + pagesDeleted: any, + callback: any, + errorCallback: any, +) => { + + let apiUrlPost: string = replaceUrl( + API.DOCREVIEWER_DOCUMENT_PAGE_DELETE, + "<ministryrequestid>", + requestid + ); + +httpPOSTRequest({url: apiUrlPost, data: pagesDeleted, token: UserService.getToken() ?? '', isBearer: true}) + .then((res:any) => { + if (res.data) { + callback(res.data); + } else { + throw new Error(`Error while deleting document pages for (requestid# ${requestid})`); + } + }) + .catch((error:any) => { + errorCallback("Error in deleting document pages"); + }); +}; + +export const fetchDeletedDocumentPages = ( + ministryrequestid: number, + callback: any, + errorCallback: any +) => { + + let apiUrlGet: string = replaceUrl( + API.DOCREVIEWER_DOCUMENT_PAGE_DELETE, + "<ministryrequestid>", + ministryrequestid + ); + + httpGETRequest(apiUrlGet, {}, UserService.getToken()) + .then((res:any) => { + if (res.data || res.data === "") { + store.dispatch(setDeletedPages(res.data)); + callback(res.data); + } else { + throw new Error(); + } + }) + .catch((error:any) => { + errorCallback("Error in fetching deleted pages:",error); + }); }; \ No newline at end of file diff --git a/web/src/components/FOI/Home/DocumentSelector.tsx b/web/src/components/FOI/Home/DocumentSelector.tsx index 136891a67..17f847060 100644 --- a/web/src/components/FOI/Home/DocumentSelector.tsx +++ b/web/src/components/FOI/Home/DocumentSelector.tsx @@ -98,7 +98,7 @@ const DocumentSelector = React.forwardRef(({ useEffect(() => { - let refLength = documents.reduce((acc: any, file: any) => acc + file.pagecount, 0); + let refLength = documents.reduce((acc: any, file: any) => acc + file.originalpagecount, 0); pageRefs.current = Array(refLength).fill(0).map((_, i) => pageRefs.current[i] || createRef()); }, [documents]) @@ -118,16 +118,10 @@ const DocumentSelector = React.forwardRef(({ }, [requestInfo]); const updatePageFlags = () => { - fetchPageFlagsMasterData( - requestid, - currentLayer.name.toLowerCase(), - (data: any) => setPageData(data), - (error: any) => console.log(error) - ); fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - documents.map((d: any) => d.documentid), +
Object.keys(pageMappedDocs?.docIdLookup).filter(key => pageMappedDocs?.docIdLookup[key].pageMappings.length > 0), //this will return only the documents which has pages in it (error: any) => console.log(error) ) } @@ -228,7 +222,9 @@ const DocumentSelector = React.forwardRef(({ } useEffect(() => { - setAdditionalData(); + if (pageFlags) { + setAdditionalData(); + } }, [consultMinistries, pageFlags]); const assignIcon = (pageFlag: any) => { @@ -256,7 +252,7 @@ const DocumentSelector = React.forwardRef(({ return faSpinner; case 8: case "Page Left Off": - return faBookmark; + return faBookmark; default: return null; } @@ -296,13 +292,15 @@ const DocumentSelector = React.forwardRef(({ } const selectTreeItem = (file: any, page: number) => { - if (pageMappedDocs?.docIdLookup && Object.keys(pageMappedDocs?.docIdLookup).length > 0) { - let pageNo: number = getStitchedPageNoFromOriginal(file.documentid, page, pageMappedDocs); - setIndividualDoc({ 'file': file, 'page': pageNo }) - setCurrentPageInfo({ 'file': file, 'page': page }); - // setCurrentDocument({ 'file': file, 'page': page }) - if (page == 1) - setDisableHover(false); + if (file.pages.includes(page)) { + if (pageMappedDocs?.docIdLookup && Object.keys(pageMappedDocs?.docIdLookup).length > 0) { + let pageNo: number = getStitchedPageNoFromOriginal(file.documentid, page, pageMappedDocs); + setIndividualDoc({ 'file': file, 'page': pageNo }) + setCurrentPageInfo({ 'file': file, 'page': page }); + // setCurrentDocument({ 'file': file, 'page': page }) + if (page == 1) + setDisableHover(false); + } } }; @@ -400,7 +398,7 @@ const DocumentSelector = React.forwardRef(({ } else - setFilesForDisplay(filteredFiles.filter((file: any) => ((filters.includes(0) && (typeof file.pageFlag === "undefined" || file.pageFlag?.length == 0 || file.pagecount != file.pageFlag?.length)) + setFilesForDisplay(filteredFiles.filter((file: any) => ((filters.includes(0) && (typeof file.pageFlag === "undefined" || file.pageFlag?.length == 0 || file.pagecount != getUpdatedPageFlagCount(file.pageFlag))) || (file.pageFlag?.find((obj: any) => ((obj.flagid != 4 && filters.includes(obj.flagid)))))) )); } @@ -408,6 +406,16 @@ const DocumentSelector = React.forwardRef(({ setFilesForDisplay(filteredFiles); } + // pageflags.length won't give the exact value if multiple pages flags (consult and any other page flag) added to a page + // Below method will return the count(distinct pages with pageflag) + const getUpdatedPageFlagCount = (pageFlags: any) => { + const distinctPages = new Set(); + for (const item of pageFlags) { + distinctPages.add(item.page); + } + return distinctPages.size; + } + const applyFilter = (flagId: number, consultee: any, event: any, allSelectedconsulteeList: any[]) => { const flagFilterCopy = [...filterFlags]; @@ -550,36 +558,40 @@ const DocumentSelector = React.forwardRef(({ const consulteeFilterView = (file: any, p: number, division?: any) => { return ( - (consulteeFilter.length > 0 ? - ((file.pageFlag?.find((obj: any) => obj.page === p + 1 && - ( (obj.flagid != 4 && filterFlags?.includes(obj.flagid))|| - (obj.programareaid?.some((val: any) => consulteeFilter.includes(val))) || - (obj.other?.some((val: any) => consulteeFilter.includes(val)))))) - && -
- openContextMenu(file, p + 1, e)} /> -
- ) : - viewWithoutConsulteeFilter(file, p) - ) + (file.pages.includes(p + 1) ? + (consulteeFilter.length > 0 ? + ((file.pageFlag?.find((obj: any) => obj.page === p + 1 && + ( (obj.flagid != 4 && filterFlags?.includes(obj.flagid))|| + (obj.programareaid?.some((val: any) => consulteeFilter.includes(val))) || + (obj.other?.some((val: any) => consulteeFilter.includes(val)))))) + && +
+ openContextMenu(file, p + 1, e)} /> +
+ ) : + viewWithoutConsulteeFilter(file, p) + ) : null + ) ); } const noFilterView = (file: any, p: number, division?: any) => { return ( - (file.pageFlag?.find((obj: any) => obj.page === p + 1) ? -
- openContextMenu(file, p + 1, e)} /> -
- : -
- openContextMenu(file, p + 1, e)} /> + (file.pages.includes(p + 1) ? + (file.pageFlag?.find((obj: any) => obj.page === p + 1) ? +
+ openContextMenu(file, p + 1, e)} />
+ : +
+ openContextMenu(file, p + 1, e)} /> +
+ ) : null ) ) } @@ -607,56 +619,58 @@ const DocumentSelector = React.forwardRef(({ ) } - const sortByModifiedDateView = filesForDisplay?.map((file: any, index: number) => { - return ( - organizeBy === "lastmodified" ? ( - - Last Modified Date: {new Date(file.attributes.lastmodified).toLocaleString('en-US', { timeZone: 'America/Vancouver' })} - {file.attachmentof && <>

Attachment of: {file.attachmentof}} - } - placement="bottom-end" - arrow - key={file?.documentid} - disableHoverListener={disableHover} - > - - { - expanded?.length > 0 ? - ( - [...Array(file.pagecount)].map((_x, p) => - (filterFlags.length > 0 ? - consulteeFilterView(file,p) - : - noFilterView(file,p) - ) - ) - ) : (<>) - } - {pageFlagList && pageFlagList?.length > 0 && openContextPopup === true && - - } - -
) : <> - ) + const sortByModifiedDateView = filesForDisplay?.map((file: any, index: number) => { + if (file?.pages?.length > 0) { + return ( + organizeBy === "lastmodified" ? ( + + Last Modified Date: {new Date(file.attributes.lastmodified).toLocaleString('en-US', { timeZone: 'America/Vancouver' })} + {file.attachmentof && <>

Attachment of: {file.attachmentof}} + } + placement="bottom-end" + arrow + key={file?.documentid} + disableHoverListener={disableHover} + > + + { + expanded?.length > 0 ? + ( + [...Array(file.originalpagecount)].map((_x, p) => + (filterFlags.length > 0 ? + consulteeFilterView(file,p) + : + noFilterView(file,p) + ) + ) + ) : (<>) + } + {pageFlagList && pageFlagList?.length > 0 && openContextPopup === true && + + } + +
) : <> + ) + } }) const sortByDivisionFilterView = divisions.map((division: any, index) => { @@ -664,48 +678,53 @@ const DocumentSelector = React.forwardRef(({ organizeBy === "division" ? ( {filesForDisplay.filter((file: any) => file.divisions.map((d: any) => d.divisionid).includes(division.divisionid)).map((file: any, i: number) => - - Last Modified Date: {new Date(file.attributes.lastmodified).toLocaleString('en-US', { timeZone: 'America/Vancouver' })} - {file.attachmentof && <>

Attachment of: {file.attachmentof}} - } - placement="bottom-end" - arrow - key={file.documentid} - disableHoverListener={disableHover} - > + <> + { file?.pages?.length > 0 ? + + Last Modified Date: {new Date(file.attributes.lastmodified).toLocaleString('en-US', { timeZone: 'America/Vancouver' })} + {file.attachmentof && <>

Attachment of: {file.attachmentof}} + } + placement="bottom-end" + arrow + key={file.documentid} + disableHoverListener={disableHover} + > - - {[...Array(file.pagecount)].map((_x, p) => - (filterFlags.length > 0 ? - consulteeFilterView(file,p,division) - : - noFilterView(file,p,division) - ) - ) - } - {pageFlagList && pageFlagList?.length > 0 && - - } - -
+ + {[...Array(file.originalpagecount)].map((_x, p) => + (filterFlags.length > 0 ? + consulteeFilterView(file,p,division) + : + noFilterView(file,p,division) + ) + ) + } + {pageFlagList && pageFlagList?.length > 0 && + + } + +
+ : null + } + )}
) : (<>) ) diff --git a/web/src/components/FOI/Home/Home.js b/web/src/components/FOI/Home/Home.js index f5fb6a2d3..7f9e53500 100644 --- a/web/src/components/FOI/Home/Home.js +++ b/web/src/components/FOI/Home/Home.js @@ -8,10 +8,11 @@ import Grid from "@mui/material/Grid"; import { fetchDocuments, fetchRedactionLayerMasterData, + fetchDeletedDocumentPages, } from "../../../apiManager/services/docReviewerService"; import { getFOIS3DocumentPreSignedUrls } from "../../../apiManager/services/foiOSSService"; import { useParams } from "react-router-dom"; -import { sortDocList } from "./utils"; +import { sortDocList, getDocumentPages } from "./utils"; import { store } from "../../../services/StoreService"; import { setCurrentLayer } from "../../../actions/documentActions"; import DocumentLoader from "../../../containers/DocumentLoader"; @@ -51,6 +52,17 @@ function Home() { let documentObjs = []; let totalPageCountVal = 0; let presignedurls = []; + let deletedDocPages = []; + + fetchDeletedDocumentPages( + foiministryrequestid, + (deletedPages) => { + deletedDocPages = deletedPages; + }, + (error) => + console.log(error) + ); + fetchDocuments( parseInt(foiministryrequestid), async (data) => { @@ -81,7 +93,7 @@ function Home() { }); let sortedFiles = [] sortDocList(_files, null, sortedFiles); - setFiles(sortedFiles); + // setFiles(sortedFiles); setCurrentPageInfo({ file: _files[0] || {}, page: 1 }); if (_files.length > 0) { let urlPromises = []; @@ -98,16 +110,18 @@ function Home() { sortDocList(newDocumentObjs, null, doclist); //prepareMapperObj will add sortorder, stitchIndex and totalPageCount to doclist //and prepare the PageMappedDocs object - prepareMapperObj(doclist); + prepareMapperObj(doclist, deletedDocPages); + const currentDoc = getCurrentDocument(doclist) setCurrentDocument({ - file: doclist[0]?.file || {}, + file: currentDoc?.file || {}, page: 1, - currentDocumentS3Url: doclist[0]?.s3url, + currentDocumentS3Url: currentDoc?.s3url, }); - // localStorage.setItem("currentDocumentS3Url", s3data); - setS3Url(doclist[0]?.s3url); + setS3Url(currentDoc?.s3url); setS3UrlReady(true); setDocsForStitcing(doclist); + //files will have [pages] added + setFiles(doclist.map(_doc => _doc.file)); setTotalPageCount(totalPageCountVal); }, (error) => { @@ -122,6 +136,10 @@ function Home() { ); }, []); + const getCurrentDocument = (doclist) => { + return doclist.find(item => item.file.pagecount > 0); + } + useEffect(() => { fetchRedactionLayerMasterData( foiministryrequestid, @@ -142,7 +160,7 @@ function Home() { } }, [validoipcreviewlayer, redactionLayers]) - const prepareMapperObj = (doclistwithSortOrder) => { + const prepareMapperObj = (doclistwithSortOrder, deletedDocPages) => { let mappedDocs = { stitchedPageLookup: {}, docIdLookup: {}, redlineDocIdLookup: {} }; let mappedDoc = { docId: 0, version: 0, division: "", pageMappings: [] }; @@ -151,24 +169,27 @@ function Home() { let totalPageCount = 0; doclistwithSortOrder.forEach((sortedDoc, _index) => { mappedDoc = { pageMappings: [] }; + const documentId = sortedDoc.file.documentid; + // pages array by removing deleted pages + let pages = getDocumentPages(documentId, deletedDocPages, sortedDoc.file.originalpagecount); let j = 0; - const pages = []; - for (let i = index + 1; i <= index + sortedDoc.file.pagecount; i++) { - j++; + + for (let i = index + 1; i <= index + sortedDoc.file.pagecount; i++) { let pageMapping = { - pageNo: j, + pageNo: pages[j], stitchedPageNo: i, }; mappedDoc.pageMappings.push(pageMapping); mappedDocs["stitchedPageLookup"][i] = { - 
docid: sortedDoc.file.documentid, + docid: documentId, docversion: sortedDoc.file.version, - page: j, + page: pages[j], }; totalPageCount = i; + j++; } - mappedDocs["docIdLookup"][sortedDoc.file.documentid] = { - docId: sortedDoc.file.documentid, + mappedDocs["docIdLookup"][documentId] = { + docId: documentId, version: sortedDoc.file.version, division: sortedDoc.file.divisions[0].divisionid, pageMappings: mappedDoc.pageMappings, @@ -177,21 +198,24 @@ function Home() { for (let div of sortedDoc.file.divisions) { fileDivisons.push(div.divisionid) } - mappedDocs["redlineDocIdLookup"][sortedDoc.file.documentid] = { - docId: sortedDoc.file.documentid, + mappedDocs["redlineDocIdLookup"][documentId] = { + docId: documentId, version: sortedDoc.file.version, division: fileDivisons, pageMappings: mappedDoc.pageMappings, }; - for (let i = 0; i < sortedDoc.file.pagecount; i++) { - pages.push(i + 1); - } - index = index + sortedDoc.file.pagecount; - sortedDoc.sortorder = _index + 1; - sortedDoc.stitchIndex = stitchIndex; + // added to iterate through the non deleted pages for the left panel functionalities + sortedDoc.file.pages = pages; sortedDoc.pages = pages; - stitchIndex += sortedDoc.file.pagecount; + if (sortedDoc.file.pagecount > 0) { + index = index + sortedDoc.file.pagecount; + //added sortorder to fix the sorting issue for redlining stitching + sortedDoc.file.sortorder = _index + 1; + sortedDoc.sortorder = _index + 1; + sortedDoc.stitchIndex = stitchIndex; + stitchIndex += sortedDoc.file.pagecount; + } }); doclistwithSortOrder.totalPageCount = totalPageCount; setPageMappedDocs(mappedDocs); diff --git a/web/src/components/FOI/Home/Redlining.js b/web/src/components/FOI/Home/Redlining.js index ca409a5e6..07074ad93 100644 --- a/web/src/components/FOI/Home/Redlining.js +++ b/web/src/components/FOI/Home/Redlining.js @@ -35,6 +35,7 @@ import { fetchPDFTronLicense, triggerDownloadRedlines, triggerDownloadFinalPackage, + deleteDocumentPages, savePageFlag, } from "../../../apiManager/services/docReviewerService"; import { @@ -64,7 +65,10 @@ import { getSliceSetDetails, sortDocObjects, sortDocObjectsForRedline, - addWatermarkToRedline + getDocumentPages, + addWatermarkToRedline, + getDocumentsForStitching, + sortBySortOrder } from "./utils"; import { Edit, MultiSelectEdit } from "./Edit"; import _ from "lodash"; @@ -108,11 +112,12 @@ const Redlining = React.forwardRef( const sections = useSelector((state) => state.documents?.sections); const currentLayer = useSelector((state) => state.documents?.currentLayer); const redactionLayers = useAppSelector((state) => state.documents?.redactionLayers); + const deletedDocPages = useAppSelector((state) => state.documents?.deletedDocPages); const viewer = useRef(null); - - const documentList = useAppSelector( - (state) => state.documents?.documentList - ); + const [documentList, setDocumentList] = useState([]); + // const documentList = useAppSelector( + // (state) => state.documents?.documentList + // ); const validoipcreviewlayer = useAppSelector((state) => state.documents?.requestinfo?.validoipcreviewlayer); const [docViewer, setDocViewer] = useState(null); @@ -176,6 +181,11 @@ const Redlining = React.forwardRef( const [includeNRPages, setIncludeNRPages]= useState(false); const [includeDuplicatePages, setIncludeDuplicatePages]= useState(false); const [redlineWatermarkPageMapping, setRedlineWatermarkPageMapping] = useState({}); + const [skipDeletePages, setSkipDeletePages] = useState(false); + const [isDisableNRDuplicate, setIsDisableNRDuplicate] = 
useState(false); + + const [enableRedactionPanel, setEnableRedactionPanel] = useState(false); + const [clickRedactionPanel, setClickRedactionPanel] = useState(false); //xml parser const parser = new XMLParser(); @@ -183,7 +193,6 @@ const Redlining = React.forwardRef( const isReadyForSignOff = () => { let pageFlagArray = []; let stopLoop = false; - if ( documentList.length > 0 && documentList.length === pageFlags?.length @@ -239,6 +248,8 @@ const Redlining = React.forwardRef( pageFlagTypes["Partial Disclosure"], pageFlagTypes["Full Disclosure"], pageFlagTypes["Withheld in Full"], + pageFlagTypes["Duplicate"], + pageFlagTypes["Not Responsive"], ].includes(flag.flagid) ); if (pageFlagArray.length > 0) { @@ -254,29 +265,66 @@ const Redlining = React.forwardRef( const isValidRedlineDivisionDownload = (divisionid, divisionDocuments) => { let isvalid = false; for (let divObj of divisionDocuments) { - if (divObj.divisionid == divisionid) { - for (let doc of divObj.documentlist) { - for (const flagInfo of doc.pageFlag) { - if ( - flagInfo.flagid != pageFlagTypes["Duplicate"] && - flagInfo.flagid != pageFlagTypes["Not Responsive"] - ) { - if(isvalid == false) { - isvalid = true; - } + if (divObj.divisionid == divisionid) { + // enable the Redline for Sign off if a division has only Incompatable files + if (divObj?.incompatableList?.length > 0) { + if(isvalid == false) { + isvalid = true; + } + } + else { + for (let doc of divObj.documentlist) { + for (const flagInfo of doc.pageFlag) { + // Added condition to handle Duplicate/NR clicked for Redline for Sign off Modal + if ( + (flagInfo.flagid != pageFlagTypes["Duplicate"] && flagInfo.flagid != pageFlagTypes["Not Responsive"]) || + ( + (includeDuplicatePages && flagInfo.flagid === pageFlagTypes["Duplicate"]) || + (includeNRPages && flagInfo.flagid === pageFlagTypes["Not Responsive"]) + ) + ) { + if(isvalid == false) { + isvalid = true; + } + } + } } } } } - } - return isvalid; + return isvalid; }; + const disableNRDuplicate = () => { + let isDisabled = false; + if (pageFlags?.length > 0) { + if (incompatibleFiles.length > 0) { + isDisabled = false; + } + else { + let duplicateNRflags = []; + for (const flagInfo of pageFlags) { + duplicateNRflags = duplicateNRflags.concat(flagInfo.pageflag.filter(flag => flag.flagid === pageFlagTypes["Duplicate"] || flag.flagid === pageFlagTypes["Not Responsive"]) + .map(flag => flag.flagid)); + } + if (docsForStitcing.totalPageCount === duplicateNRflags.length) { + isDisabled = true; + } + } + } + setIsDisableNRDuplicate(isDisabled); + if (isDisabled) { + setIncludeNRPages(isDisabled) + setIncludeDuplicatePages(isDisabled); + } + } + const [enableSavingRedline, setEnableSavingRedline] = useState(false); const [enableSavingOipcRedline, setEnableSavingOipcRedline] = useState(false) const [enableSavingFinal, setEnableSavingFinal] = useState(false); const [filteredComments, setFilteredComments] = useState({}); + const [pagesRemoved, setPagesRemoved] = useState([]); // if using a class, equivalent of componentDidMount useEffect(() => { @@ -314,6 +362,7 @@ const Redlining = React.forwardRef( documentViewer.getTool(instance.Core.Tools.ToolNames.REDACTION) ); const UIEvents = instance.UI.Events; + //customize header - insert a dropdown button const document = instance.UI.iframeWindow.document; setIframeDocument(document); @@ -468,6 +517,18 @@ const Redlining = React.forwardRef( ); }); + instance.UI.setHeaderItems(header => { + header.getHeader('toolbarGroup-Redact') + .get('undoButton').insertBefore({ + type: 'actionButton', 
+ dataElement: 'customRedactionPanel', + img: '', + onClick: () => { + setClickRedactionPanel(true); + } + }); + }); + instance.UI.annotationPopup.add({ type: "customElement", title: "Edit", @@ -534,13 +595,45 @@ const Redlining = React.forwardRef( let localDocumentInfo = currentDocument; if (Object.entries(individualDoc["file"])?.length <= 0) individualDoc = localDocumentInfo; - let doclistCopy = [...docsForStitcing]; + // let doclistCopy = [...docsForStitcing]; + let doclistCopy = getDocumentsForStitching([...docsForStitcing]) + + //Disable the delete Icon if only 1 page for a request + const disableDelete = doclistCopy.length === 1 && doclistCopy[0]?.file?.pagecount === 1; + if (disableDelete) { + instance.UI.disableElements(["thumbDelete","deletePage"]); + } + let slicerdetails = await getSliceSetDetails( doclistCopy.length, true ); - if(doclistCopy.length > 1) + + // Handle deletePages for the first document + let _firstdoc = documentViewer.getDocument(); + const deletedPages = getDeletedPagesBeforeStitching(currentDocument?.file?.documentid); + if (deletedPages.length > 0) { + setSkipDeletePages(true); + await _firstdoc.removePages(deletedPages); + } + + if(doclistCopy.length > 1) { doclistCopy?.shift(); + let setCount = slicerdetails.setcount; + let slicer = slicerdetails.slicer; + let objpreptasks = new Array(setCount); + for (let slicecount = 1; slicecount <= setCount; slicecount++) { + let sliceDoclist = doclistCopy.splice(0, slicer); + objpreptasks.push( + mergeObjectsPreparation( + instance.Core.createDocument, + sliceDoclist, + slicecount + ) + ); + } + Promise.all(objpreptasks); + } let setCount = slicerdetails.setcount; let slicer = slicerdetails.slicer; let objpreptasks = new Array(setCount); @@ -580,6 +673,12 @@ const Redlining = React.forwardRef( }); setFilteredComments(e.detail); }); + //Triggered when the layout has changed because pages have permanently been added, removed, moved or changed in some other way. + documentViewer.addEventListener("pagesUpdated", change => { + if (change.removed.length > 0) { + setPagesRemoved(change.removed) + } + }) documentViewer.addEventListener("click", async () => { scrollLeftPanel(documentViewer.getCurrentPage()); @@ -591,7 +690,38 @@ const Redlining = React.forwardRef( document.body.addEventListener( "click", (e) => { - document.getElementById("saving_menu").style.display = "none"; + document.getElementById("saving_menu").style.display = "none"; + + // toggle between notesPanel and redactionPanel handled here + const toggleNotesButton = document.querySelector( + '[data-element="toggleNotesButton"]' + ); + if (toggleNotesButton) { + toggleNotesButton?.addEventListener("click", function () { + handleRedactionPanelClick(true, instance); + const isActive = toggleNotesButton?.classList.contains("active"); + if (!isActive) { + toggleNotesButton.classList.add("active"); + instance.UI.enableElements(["notesPanel"]); + } + }); + } + + const customRedactionPanel = document.querySelector( + '[data-element="customRedactionPanel"]' + ); + if (customRedactionPanel) { + customRedactionPanel?.addEventListener("click", function () { + if (toggleNotesButton) { + const isActive = toggleNotesButton?.classList.contains("active"); + if (isActive) { + toggleNotesButton.classList.remove("active"); + instance.UI.closeElements(['notesPanel']); + instance.UI.disableElements(["notesPanel"]); + } + } + }); + } //START: Bulk Edit using Multi Select Option //remove MultiSelectedAnnotations on click of multiDeleteButton because post that nothing will be selected. 
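Review note: the wiring above keeps the stock notes panel and the new custom redaction panel mutually exclusive. A minimal sketch of the intended behaviour, assuming the stock WebViewer UI element names used elsewhere in this diff (`instance` is the WebViewer instance):

```js
// Open the redaction panel and shut out the notes panel (and vice versa).
// Disabling (not just closing) prevents the hidden panel from being
// reopened by its own toolbar button while the other panel is active.
function showRedactionPanel(instance) {
  instance.UI.closeElements(["notesPanel"]);
  instance.UI.disableElements(["notesPanel"]);
  instance.UI.enableElements(["redactionPanel"]);
  instance.UI.openElements(["redactionPanel"]);
}

function showNotesPanel(instance) {
  instance.UI.closeElements(["redactionPanel"]);
  instance.UI.disableElements(["redactionPanel"]);
  instance.UI.enableElements(["notesPanel"]);
  instance.UI.openElements(["notesPanel"]);
}
```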
@@ -679,6 +809,74 @@ const Redlining = React.forwardRef( initializeWebViewer(); }, []); + useEffect(() =>{ + if (clickRedactionPanel) { + handleRedactionPanelClick(enableRedactionPanel, docInstance); + setClickRedactionPanel(false); + } + + }, [clickRedactionPanel, enableRedactionPanel]) + + + const handleRedactionPanelClick = (isOpen, instance) => { + if (instance) { + switch (isOpen) { + case true: + instance.UI.closeElements(['redactionPanel']); + instance.UI.disableElements(['redactionPanel']); + setEnableRedactionPanel(false) + break; + case false: + instance.UI.enableElements(['redactionPanel']); + instance.UI.openElements(['redactionPanel']); + setEnableRedactionPanel(true); + break; + } + } + } + + // Get deletePages based on documentid + const getDeletedPagesBeforeStitching = (documentid) => { + let deletedPages = []; + if (deletedDocPages) { + deletedPages = deletedDocPages[documentid] || []; + } + return deletedPages; + } + + useEffect(() => { + // API call to save Deleted Pages to the BE + if (pagesRemoved.length > 0 && pageMappedDocs?.docIdLookup && !skipDeletePages) { + const results = {}; + for (const [docId, obj] of Object.entries(pageMappedDocs.docIdLookup)) { + const { pageMappings } = obj; + for (const mapping of pageMappings) { + if (pagesRemoved.includes(mapping.stitchedPageNo)) { + if (!results[docId]) { + results[docId] = { docid: parseInt(docId), pages: [] }; + } + results[docId].pages.push(mapping.pageNo); + } + } + } + const finalResults = { + redactionlayer: currentLayer?.name, + documentpages: Object.values(results) }; + + deleteDocumentPages( + requestid, + finalResults, + (data) => { + window.location.reload(); + }, + (error) => { + console.log(error); + }, + ); + } + + },[pagesRemoved, skipDeletePages, pageMappedDocs]) + const mergeObjectsPreparation = async ( createDocument, slicedsetofdoclist, @@ -796,7 +994,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + getDocumentsForStitching(docsForStitcing)?.map(d => d.file.documentid), (error) => console.log(error) ); } @@ -834,12 +1032,25 @@ const Redlining = React.forwardRef( // This will happen when importing the initial annotations // from the server or individual changes from other users + + /**Fix for lengthy section cutoff issue with response pkg + * download - changed overlaytext to freetext annotations after + * redaction applied*/ + if (info['source'] === 'redactionApplied') { + annotations.forEach((annotation) => { + if(annotation.Subject == "Free Text"){ + docInstance?.Core?.annotationManager.addAnnotation(annotation); + } + }); + } + //oipc changes - begin if (validoipcreviewlayer && currentLayer.name.toLowerCase() === "redline") { return; } //oipc changes - end + if ( info.source !== "redactionApplied" && info.source !== "cancelRedaction" @@ -909,7 +1120,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -931,7 +1142,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -1106,7 +1317,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => 
d.documentid), (error) => console.log(error) ); }, @@ -1140,7 +1351,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -1247,6 +1458,7 @@ const Redlining = React.forwardRef( const checkSavingRedlineButton = (_instance) => { let _enableSavingRedline = isReadyForSignOff() && isValidRedlineDownload(); + disableNRDuplicate(); //oipc changes - begin const _enableSavingOipcRedline = (validoipcreviewlayer === true && currentLayer.name.toLowerCase() === "oipc") && @@ -1296,8 +1508,10 @@ const Redlining = React.forwardRef( }; useEffect(() => { - checkSavingRedlineButton(docInstance); - }, [pageFlags, isStitchingLoaded]); + if (documentList.length > 0 && pageFlags?.length > 0) { + checkSavingRedlineButton(docInstance); + } + }, [pageFlags, isStitchingLoaded, documentList]); const stitchPages = (_doc, pdftronDocObjs) => { @@ -1425,13 +1639,17 @@ const Redlining = React.forwardRef( }; useEffect(() => { + if (docsForStitcing.length > 0) { + setDocumentList(getDocumentsForStitching([...docsForStitcing])?.map(docs => docs.file)); + } if ( pdftronDocObjects?.length > 0 && docsForStitcing.length > 0 && merge && docViewer ) { - let doclistCopy = [...docsForStitcing]; + // let doclistCopy = [...docsForStitcing]; + let doclistCopy = getDocumentsForStitching([...docsForStitcing]) if(doclistCopy.length > 1){ doclistCopy?.shift(); //remove first document from the list let _pdftronDocObjects = sortDocObjects(pdftronDocObjects, doclistCopy); @@ -1466,6 +1684,8 @@ const Redlining = React.forwardRef( } applyAnnotationsFunc(); setIsStitchingLoaded(true); + setPagesRemoved([]); + setSkipDeletePages(false); setpdftronDocObjects([]); setstichedfiles([]); } @@ -1630,7 +1850,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -1776,7 +1996,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -1935,7 +2155,7 @@ const Redlining = React.forwardRef( fetchPageFlag( requestid, currentLayer.name.toLowerCase(), - docsForStitcing.map(d => d.file.documentid), + documentList?.map(d => d.documentid), (error) => console.log(error) ); }, @@ -2368,7 +2588,10 @@ const Redlining = React.forwardRef( let divDocList = documentList.filter((doc) => doc.divisions.map((d) => d.divisionid).includes(div.divisionid) ); - divDocList = sortByLastModified(divDocList); + + // sort based on sortorder as the sortorder added based on the LastModified + divDocList = sortBySortOrder(divDocList); + let incompatableList = incompatibleFiles.filter((doc) => doc.divisions.map((d) => d.divisionid).includes(div.divisionid) ); @@ -2402,7 +2625,8 @@ const Redlining = React.forwardRef( reqdocuments.push(doc); } } - prepareRedlinePageMappingByRequest(sortByLastModified(reqdocuments)); + // sort based on sortorder as the sortorder added based on the LastModified + prepareRedlinePageMappingByRequest(sortBySortOrder(reqdocuments)); } else { prepareRedlinePageMappingByDivision(divisionDocuments); } @@ -2420,73 +2644,82 @@ const Redlining = React.forwardRef( let NRWatermarksPages = {}; let NRWatermarksPagesEachDiv = []; for (let doc of divisionDocuments) { - 
let pagesToRemoveEachDoc = []; - pageMappings[doc.documentid] = {}; - //gather pages that need to be removed - doc.pageFlag.sort((a, b) => a.page - b.page); //sort pageflag by page # - for (const flagInfo of doc.pageFlag) { - if (flagInfo.flagid !== pageFlagTypes["Consult"]) { // ignore consult flag to fix bug FOIMOD-3062 - if (flagInfo.flagid == pageFlagTypes["Duplicate"]) { - if(includeDuplicatePages) { - duplicateWatermarkPagesEachDiv.push( - getStitchedPageNoFromOriginal( - doc.documentid, - flagInfo.page, - pageMappedDocs - ) - pagesToRemove.length - ); + if (doc.pagecount > 0) { + let pagesToRemoveEachDoc = []; + pageMappings[doc.documentid] = {}; + //gather pages that need to be removed + doc.pageFlag.sort((a, b) => a.page - b.page); //sort pageflag by page # + let pageIndex = 1; + for (const flagInfo of doc.pageFlag) { + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { // ignore consult flag to fix bug FOIMOD-3062 + if (flagInfo.flagid == pageFlagTypes["Duplicate"]) { + if(includeDuplicatePages) { + duplicateWatermarkPagesEachDiv.push( + getStitchedPageNoFromOriginal( + doc.documentid, + flagInfo.page, + pageMappedDocs + ) - pagesToRemove.length + ); - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; - } else { - pagesToRemoveEachDoc.push(flagInfo.page); - pagesToRemove.push( - getStitchedPageNoFromOriginal( - doc.documentid, - flagInfo.page, - pageMappedDocs - ) - ); - } - } else if (flagInfo.flagid == pageFlagTypes["Not Responsive"]) { - if(includeNRPages) { - NRWatermarksPagesEachDiv.push( - getStitchedPageNoFromOriginal( - doc.documentid, - flagInfo.page, - pageMappedDocs - ) - pagesToRemove.length - ); + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + } else { + pagesToRemoveEachDoc.push(flagInfo.page); + pagesToRemove.push( + getStitchedPageNoFromOriginal( + doc.documentid, + flagInfo.page, + pageMappedDocs + ) + ); + } + } else if (flagInfo.flagid == pageFlagTypes["Not Responsive"]) { + if(includeNRPages) { + NRWatermarksPagesEachDiv.push( + getStitchedPageNoFromOriginal( + doc.documentid, + flagInfo.page, + pageMappedDocs + ) - pagesToRemove.length + ); - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + } else { + pagesToRemoveEachDoc.push(flagInfo.page); + pagesToRemove.push( + getStitchedPageNoFromOriginal( + doc.documentid, + flagInfo.page, + pageMappedDocs + ) + ); + } } else { - pagesToRemoveEachDoc.push(flagInfo.page); - pagesToRemove.push( - getStitchedPageNoFromOriginal( - doc.documentid, - flagInfo.page, - pageMappedDocs - ) - ); + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + pageIndex ++; + } + } + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { + pageIndex ++; } - } else { - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; } } + //End of pageMappingsByDivisions + totalPageCount += Object.keys( + pageMappings[doc.documentid] + ).length; + totalPageCountIncludeRemoved += doc.pagecount; } - //End of pageMappingsByDivisions - totalPageCount += Object.keys( - pageMappings[doc.documentid] - ).length; - totalPageCountIncludeRemoved += doc.pagecount; } divPageMappings['0'] = 
pageMappings; removepages['0'] = pagesToRemove; @@ -2517,59 +2750,67 @@ const Redlining = React.forwardRef( let NRWatermarksPagesEachDiv = []; for (let divObj of divisionDocuments) { divisionCount++; - for (let doc of sortByLastModified(divObj.documentlist)) { - let pagesToRemoveEachDoc = []; - pageMappings[doc.documentid] = {}; - - //gather pages that need to be removed - doc.pageFlag.sort((a, b) => a.page - b.page); //sort pageflag by page # - //if(isIgnoredDocument(doc, doc['pagecount'], divisionDocuments) == false) { - for (const flagInfo of doc.pageFlag) { - if (flagInfo.flagid !== pageFlagTypes["Consult"]) { // ignore consult flag to fix bug FOIMOD-3062 - if (flagInfo.flagid == pageFlagTypes["Duplicate"]) { - if(includeDuplicatePages) { - duplicateWatermarkPagesEachDiv.push(flagInfo.page + totalPageCountIncludeRemoved - pagesToRemove.length); - - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; + // sort based on sortorder as the sortorder added based on the LastModified + for (let doc of sortBySortOrder(divObj.documentlist)) { + if (doc.pagecount > 0) { + let pagesToRemoveEachDoc = []; + pageMappings[doc.documentid] = {}; + let pageIndex = 1; + //gather pages that need to be removed + doc.pageFlag.sort((a, b) => a.page - b.page); //sort pageflag by page # + //if(isIgnoredDocument(doc, doc['pagecount'], divisionDocuments) == false) { + for (const flagInfo of doc.pageFlag) { + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { // ignore consult flag to fix bug FOIMOD-3062 + if (flagInfo.flagid == pageFlagTypes["Duplicate"]) { + if(includeDuplicatePages) { + duplicateWatermarkPagesEachDiv.push(pageIndex + totalPageCountIncludeRemoved - pagesToRemove.length); + + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + } else { + pagesToRemoveEachDoc.push(flagInfo.page); + + pagesToRemove.push( + pageIndex + totalPageCountIncludeRemoved + ); + } + } else if (flagInfo.flagid == pageFlagTypes["Not Responsive"]) { + if(includeNRPages) { + NRWatermarksPagesEachDiv.push(pageIndex + totalPageCountIncludeRemoved - pagesToRemove.length); + + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + } else { + pagesToRemoveEachDoc.push(flagInfo.page); + + pagesToRemove.push( + pageIndex + totalPageCountIncludeRemoved + ); + } } else { - pagesToRemoveEachDoc.push(flagInfo.page); - - pagesToRemove.push( - flagInfo.page + totalPageCountIncludeRemoved - ); + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { + pageMappings[doc.documentid][flagInfo.page] = + pageIndex + + totalPageCount - + pagesToRemoveEachDoc.length; + } } - } else if (flagInfo.flagid == pageFlagTypes["Not Responsive"]) { - if(includeNRPages) { - NRWatermarksPagesEachDiv.push(flagInfo.page + totalPageCountIncludeRemoved - pagesToRemove.length); - - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; - } else { - pagesToRemoveEachDoc.push(flagInfo.page); - - pagesToRemove.push( - flagInfo.page + totalPageCountIncludeRemoved - ); + if (flagInfo.flagid !== pageFlagTypes["Consult"]) { + pageIndex ++; } - } else { - pageMappings[doc.documentid][flagInfo.page] = - flagInfo.page + - totalPageCount - - pagesToRemoveEachDoc.length; } } - } - //End of pageMappingsByDivisions - totalPageCount += Object.keys( - pageMappings[doc.documentid] - ).length; - totalPageCountIncludeRemoved += doc.pagecount; + //End of 
pageMappingsByDivisions + totalPageCount += Object.keys( + pageMappings[doc.documentid] + ).length; + totalPageCountIncludeRemoved += doc.pagecount; //} + } } divPageMappings[divObj.divisionid] = pageMappings; @@ -2907,6 +3148,8 @@ const Redlining = React.forwardRef( setRedlineModalOpen(false); setRedlineSaving(true); setRedlineCategory(modalFor); + // skip deletePages API call for all removePages related to Redline/Response package creation + setSkipDeletePages(true); switch (modalFor) { case "oipcreview": saveRedlineDocument(docInstance, modalFor); @@ -3009,8 +3252,11 @@ const Redlining = React.forwardRef( divCount == res.divdocumentList.length ) { let sorteddocIds = []; - let sorteddocuments = sortByLastModified(documentsObjArr); + + // sort based on sortorder as the sortorder added based on the LastModified + let sorteddocuments = sortBySortOrder(documentsObjArr); stitchDocuments["0"] = setStitchDetails(sorteddocuments); + for(const element of sorteddocuments) { sorteddocIds.push(element['documentid']); } @@ -3025,11 +3271,12 @@ const Redlining = React.forwardRef( res.issingleredlinepackage != "Y" && docCount == div.documentlist.length ) { - //let divdocumentids = documentsObjArr.map((obj) => obj.documentid); - + let divdocumentids = []; - let sorteddocuments = sortByLastModified(div.documentlist); + // sort based on sortorder as the sortorder added based on the LastModified + let sorteddocuments = sortBySortOrder(div.documentlist); stitchDocuments[div.divisionid] = setStitchDetails(sorteddocuments); + for(const element of sorteddocuments) { divdocumentids.push(element['documentid']); } @@ -3055,6 +3302,7 @@ const Redlining = React.forwardRef( summarydocuments: prepareredlinesummarylist(stitchDocuments), redactionlayerid: currentLayer.redactionlayerid }); + if(res.issingleredlinepackage == 'Y' || divisions.length == 1){ stitchSingleDivisionRedlineExport( _instance, @@ -3099,7 +3347,8 @@ const Redlining = React.forwardRef( summarylist.push(summary_division); } let sorteddocids = [] - let sorteddocs = sortByLastModified(alldocuments) + // sort based on sortorder as the sortorder added based on the LastModified + let sorteddocs = sortBySortOrder(alldocuments) for (const sorteddoc of sorteddocs) { sorteddocids.push(sorteddoc['documentid']); } @@ -3144,17 +3393,17 @@ const Redlining = React.forwardRef( //if (isIgnoredDocument(doc, docObj.getPageCount(), divisionDocuments) == false) { docCount++; if (docCount == 1) { + // Delete pages from the first document + const deletedPages = getDeletedPagesBeforeStitching(doc.documentid); + if (deletedPages.length > 0) { + docObj.removePages(deletedPages); + } stitchedDocObj = docObj; } else { - // create an array containing 1…N - let pages = Array.from( - { length: doc.pagecount }, - (v, k) => k + 1 - ); let pageIndexToInsert = stitchedDocObj?.getPageCount() + 1; await stitchedDocObj.insertPages( docObj, - pages, + doc.pages, pageIndexToInsert ); } @@ -3255,7 +3504,13 @@ const Redlining = React.forwardRef( docCount++; setredlineDocCount(docCount); if (isIgnoredDocument(filerow, newDoc, divisionDocuments) === false) { - if (filerow.sortorder === 1) { + if (filerow.stitchIndex === 1) { + // Delete pages from the first document + const deletedPages = getDeletedPagesBeforeStitching(filerow?.documentid); + if (deletedPages.length > 0) { + setSkipDeletePages(true); + await newDoc.removePages(deletedPages); + } stitchedDocObj = newDoc; setstichedfilesForRedline(stitchedDocObj) } else { @@ -3365,6 +3620,17 @@ const Redlining = React.forwardRef( let divisionid 
= key; let stitchObject = redlineStitchObject[key]; + // if all pages of a division with NR/Duplicate + // and NR/Duplicate is not checked. + // make stitchObject = null to stop the stitching + for (const [documentId, values] of Object.entries(redlinepageMappings["divpagemappings"][divisionid])) { + if(Object.keys(values).length === 0) { + stitchObject = null; + redlineStitchInfo[divisionid]["documentids"] = []; + redlineStitchInfo[divisionid]["stitchpages"] = []; + redlineStitchInfo[divisionid]["s3path"] = null; + } + } if (stitchObject == null) { triggerRedlineZipper( redlineIncompatabileMappings[divisionid], @@ -3522,14 +3788,13 @@ const Redlining = React.forwardRef( let stitchIndex = 1; sortedList.forEach((sortedItem, _index) => { - const pages = []; - for (let i = 0; i < sortedItem.pagecount; i++) { - pages.push(i + 1); - } index = index + sortedItem.pagecount; - sortedItem.sortorder = _index + 1; + // DO NOT setup the sortorder to 1 for 1st divisional document + // as the sort order is used to sort the document irrespective of the division + // sortedItem.sortorder = _index + 1; sortedItem.stitchIndex = stitchIndex; - sortedItem.pages = pages; + // No need to update the pages again as the pages are already updated while preparing prepareMapperObj + // sortedItem.pages = pages; stitchIndex += sortedItem.pagecount; }); return sortedList; @@ -3592,7 +3857,6 @@ const Redlining = React.forwardRef( summarydocuments : prepareresponseredlinesummarylist(documentList), redactionlayerid: currentLayer.redactionlayerid }; - getResponsePackagePreSignedUrl( requestid, documentList[0], @@ -3600,139 +3864,116 @@ const Redlining = React.forwardRef( const toastID = toast.loading("Start generating final package..."); zipServiceMessage.requestnumber = res.requestnumber; zipServiceMessage.bcgovcode = res.bcgovcode; - - // go through annotations and get all section stamps - annotationManager.exportAnnotations().then(async (xfdfString) => { - //parse annotation xml - let jObj = parser.parseFromString(xfdfString); // Assume xmlText contains the example XML - let annots = jObj.getElementsByTagName("annots"); - - let sectionStamps = {}; - let stampJson = {}; - for (const annot of annots[0].children) { - // get section stamps from xml - if (annot.name == "freetext") { - let customData = annot.children.find( - (element) => element.name == "trn-custom-data" + let annotList = annotationManager.getAnnotationsList(); + annotManager.ungroupAnnotations(annotList); + /** remove duplicate and not responsive pages */ + let pagesToRemove = []; + for (const infoForEachDoc of pageFlags) { + for (const pageFlagsForEachDoc of infoForEachDoc.pageflag) { + /** pageflag duplicate or not responsive */ + if ( + pageFlagsForEachDoc.flagid === pageFlagTypes["Duplicate"] || + pageFlagsForEachDoc.flagid === pageFlagTypes["Not Responsive"] + ) { + pagesToRemove.push( + getStitchedPageNoFromOriginal( + infoForEachDoc.documentid, + pageFlagsForEachDoc.page, + pageMappedDocs + ) ); - if ( - customData?.attributes?.bytes?.includes("parentRedaction") - ) { - //parse section info to json - stampJson = JSON.parse( - customData.attributes.bytes - .replace(/"\[/g, "[") - .replace(/\]"/g, "]") - .replace(/"/g, '"') - .replace(/\\/g, "") - ); - sectionStamps[stampJson["parentRedaction"]] = - stampJson["trn-wrapped-text-lines"][0]; - } } } + } + let doc = documentViewer.getDocument(); + await annotationManager.applyRedactions(); + /**must apply redactions before removing pages*/ + await doc.removePages(pagesToRemove); + + const { PDFNet } = 
_instance.Core; + PDFNet.initialize(); + await stampPageNumberResponse(documentViewer, PDFNet); + toast.update(toastID, { + render: "Saving section stamps...", + isLoading: true, + }); + /**Fixing section cutoff issue in response pkg- + * (For showing section names-freetext annotations are + * added once redactions are applied in the annotationChangedHandler) + * then export & filter freetext & widget annotations + * after redactions applied. + * (widget is needed for showing data from fillable pdfs). + */ + let annotsAfterRedaction = await annotationManager.getAnnotationsList(); + const filteredAnnotations = annotsAfterRedaction.filter(annotation => { + if (_instance.Core.Annotations) { + return ( + annotation instanceof _instance.Core.Annotations.FreeTextAnnotation || + annotation instanceof _instance.Core.Annotations.WidgetAnnotation + ); + } + return false; + } + ); + const xfdfString = await annotationManager.exportAnnotations({ annotationList: filteredAnnotations, widgets:true}); + /** apply redaction and save to s3 - xfdfString is needed to display + * the freetext(section name) on downloaded file.*/ + doc + .getFileData({ + // saves the document with annotations in it + xfdfString:xfdfString, + downloadType: downloadType, + flatten: true + }) + .then(async (_data) => { + const _arr = new Uint8Array(_data); + const _blob = new Blob([_arr], { type: "application/pdf" }); - // add section stamps to redactions as overlay text - let annotList = annotationManager.getAnnotationsList(); toast.update(toastID, { - render: "Saving section stamps...", + render: "Saving final package to Object Storage...", isLoading: true, }); - for (const annot of annotList) { - if (sectionStamps[annot.Id]) { - annotationManager.setAnnotationStyles(annot, { - OverlayText: sectionStamps[annot.Id], - FontSize: Math.min(parseInt(annot.FontSize), 9) + "pt", - }); - } - } - annotManager.ungroupAnnotations(annotList); - - // remove duplicate and not responsive pages - let pagesToRemove = []; - for (const infoForEachDoc of pageFlags) { - for (const pageFlagsForEachDoc of infoForEachDoc.pageflag) { - // pageflag duplicate or not responsive - if ( - pageFlagsForEachDoc.flagid === pageFlagTypes["Duplicate"] || - pageFlagsForEachDoc.flagid === pageFlagTypes["Not Responsive"] - ) { - pagesToRemove.push( - getStitchedPageNoFromOriginal( - infoForEachDoc.documentid, - pageFlagsForEachDoc.page, - pageMappedDocs - ) - ); - } - } - } - - let doc = documentViewer.getDocument(); - await annotationManager.applyRedactions(); // must apply redactions before removing pages - await doc.removePages(pagesToRemove); - - const { PDFNet } = _instance.Core; - PDFNet.initialize(); - await stampPageNumberResponse(documentViewer, PDFNet); - - //apply redaction and save to s3 - doc - .getFileData({ - // saves the document with annotations in it - downloadType: downloadType, - flatten: true - }) - .then(async (_data) => { - const _arr = new Uint8Array(_data); - const _blob = new Blob([_arr], { type: "application/pdf" }); - + saveFilesinS3( + { filepath: res.s3path_save }, + _blob, + (_res) => { toast.update(toastID, { - render: "Saving final package to Object Storage...", - isLoading: true, + render: + "Final package is saved to Object Storage. 
Page will reload in 3 seconds..", + type: "success", + className: "file-upload-toast", + isLoading: false, + autoClose: 3000, + hideProgressBar: true, + closeOnClick: true, + pauseOnHover: true, + draggable: true, + closeButton: true, }); - saveFilesinS3( - { filepath: res.s3path_save }, - _blob, - (_res) => { - toast.update(toastID, { - render: - "Final package is saved to Object Storage. Page will reload in 3 seconds..", - type: "success", - className: "file-upload-toast", - isLoading: false, - autoClose: 3000, - hideProgressBar: true, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - closeButton: true, - }); - prepareMessageForResponseZipping( - res.s3path_save, - zipServiceMessage - ); - setTimeout(() => { - window.location.reload(true); - }, 3000); - }, - (_err) => { - console.log(_err); - toast.update(toastID, { - render: "Failed to save final package to Object Storage", - type: "error", - className: "file-upload-toast", - isLoading: false, - autoClose: 3000, - hideProgressBar: true, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - closeButton: true, - }); - } + prepareMessageForResponseZipping( + res.s3path_save, + zipServiceMessage ); - }); + setTimeout(() => { + window.location.reload(true); + }, 3000); + }, + (_err) => { + console.log(_err); + toast.update(toastID, { + render: "Failed to save final package to Object Storage", + type: "error", + className: "file-upload-toast", + isLoading: false, + autoClose: 3000, + hideProgressBar: true, + closeOnClick: true, + pauseOnHover: true, + draggable: true, + closeButton: true, + }); + } + ); }); }, (error) => { @@ -3755,7 +3996,8 @@ const Redlining = React.forwardRef( summarylist.push(summary_division); let sorteddocids = [] - let sorteddocs = sortByLastModified(alldocuments) + // sort based on sortorder as the sortorder added based on the LastModified + let sorteddocs = sortBySortOrder(alldocuments) for (const sorteddoc of sorteddocs) { sorteddocids.push(sorteddoc['documentid']); } @@ -3826,6 +4068,8 @@ const Redlining = React.forwardRef( setIncludeDuplicatePages(e.target.checked); } + + return (
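Review note: the reworked save flow above applies redactions and removes Duplicate/Not Responsive pages before exporting annotations. A condensed sketch of the export step it performs (same WebViewer calls as the diff; `doc`, `annotationManager`, `_instance`, and `downloadType` come from the surrounding scope):

```js
// After applyRedactions(), keep only free-text annotations (the section
// labels) and widget annotations (fillable-form fields) in the saved file.
const { Annotations } = _instance.Core;
const annots = annotationManager.getAnnotationsList();
const keep = annots.filter(
  (a) =>
    a instanceof Annotations.FreeTextAnnotation ||
    a instanceof Annotations.WidgetAnnotation
);
// widgets: true keeps form-field data alongside the widget annotations
const xfdf = await annotationManager.exportAnnotations({
  annotationList: keep,
  widgets: true,
});
// flatten: true bakes the filtered annotations into the page content
const data = await doc.getFileData({
  xfdfString: xfdf,
  downloadType: downloadType,
  flatten: true,
});
```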
@@ -3974,6 +4218,7 @@ const Redlining = React.forwardRef( id="nr-checkbox" checked={includeNRPages} onChange={handleIncludeNRPages} + disabled={isDisableNRDuplicate} />
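Review note: this checkbox and the duplicate-pages checkbox in the next hunk are both driven by `isDisableNRDuplicate`, set by `disableNRDuplicate()` earlier in this diff. Distilled, the rule it implements is roughly (sketch only, using names from this diff):

```js
// Disabled (and forced checked) only when every stitched page is flagged
// Duplicate or Not Responsive and there are no incompatible files;
// otherwise the user may still include/exclude those pages in the package.
function shouldDisableNRDuplicate(pageFlags, incompatibleFiles, totalPageCount, pageFlagTypes) {
  if (!pageFlags?.length || incompatibleFiles.length > 0) return false;
  const flagged = pageFlags.flatMap((doc) =>
    doc.pageflag.filter(
      (f) =>
        f.flagid === pageFlagTypes["Duplicate"] ||
        f.flagid === pageFlagTypes["Not Responsive"]
    )
  );
  return flagged.length === totalPageCount;
}
```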
@@ -3984,6 +4229,7 @@ const Redlining = React.forwardRef( id="duplicate-checkbox" checked={includeDuplicatePages} onChange={handleIncludeDuplicantePages} + disabled={isDisableNRDuplicate} /> } diff --git a/web/src/components/FOI/Home/utils.js b/web/src/components/FOI/Home/utils.js index ace6979f2..de04b3c68 100644 --- a/web/src/components/FOI/Home/utils.js +++ b/web/src/components/FOI/Home/utils.js @@ -2,7 +2,11 @@ export const getStitchedPageNoFromOriginal = (docid, page, pageMappedDocs) => { let stitchedPageNo = 0; if (docid && !Array.isArray(pageMappedDocs)) { let doc = pageMappedDocs?.docIdLookup[docid]; - stitchedPageNo = doc?.pageMappings?.[page - 1].stitchedPageNo; + // stitchedPageNo = doc?.pageMappings?.[page - 1].stitchedPageNo; + let stitchedPage = doc?.pageMappings?.filter(_page => _page.pageNo === page); + if (stitchedPage && stitchedPage.length > 0) { + stitchedPageNo = stitchedPage[0].stitchedPageNo; + } } return stitchedPageNo; }; @@ -100,6 +104,27 @@ export const sortByLastModified = (files) => { return sortedList }; +export const sortBySortOrder = (doclist) => { + doclist?.sort((a, b) => a?.sortorder - b?.sortorder); + return doclist; +} + +// pages array by removing deleted pages +export const getDocumentPages = (documentid, deletedDocPages, originalPagecount) => { + const pages = []; + let deletedPages = []; + if (deletedDocPages) { + deletedPages = deletedDocPages[documentid] || []; + } + for (let i = 0; i < originalPagecount; i++) { + const pageNumber = i + 1; + if (!deletedPages.includes(pageNumber)) { + pages.push(pageNumber); + } + } + return pages; +} + export const getValidSections = (sections, redactionSectionsIds) => { return sections.filter((s) => redactionSectionsIds.indexOf(s.id) > -1); }; @@ -311,4 +336,9 @@ export const addWatermarkToRedline = async (stitchedDocObj, redlineWatermarkPage }, }); } -}; \ No newline at end of file +}; + +// Keep only documents that still have pages to stitch +export const getDocumentsForStitching = (doclist) => { + return doclist.filter(_doc => _doc.file.pagecount > 0); +} \ No newline at end of file diff --git a/web/src/constants/constants.ts b/web/src/constants/constants.ts index 14569d2a1..97e5b9b4d 100644 --- a/web/src/constants/constants.ts +++ b/web/src/constants/constants.ts @@ -14,7 +14,7 @@ export const SESSION_SECURITY_KEY = "u7x!A%D*G-KaNdRgUkXp2s5v8y/B?E(H"; export const SESSION_LIFETIME = 21600000; export const PDFVIEWER_DISABLED_FEATURES= window._env_?.REACT_APP_PDFVIEWERDISABLED ?? process.env.REACT_APP_PDFVIEWERDISABLED ??
-"linkButton,stickyToolButton,highlightToolButton,freeHandToolButton,freeHandHighlightToolButton,freeTextToolButton,markInsertTextToolButton,markReplaceTextToolButton,textSquigglyToolButton,textStrikeoutToolButton,textRedactToolButton,textUnderlineToolButton,textHighlightToolButton,markReplaceTextGroupButton,markInsertTextGroupButton,strikeoutToolGroupButton,squigglyToolGroupButton,underlineToolGroupButton,highlightToolGroupButton,toolbarGroup-Edit,toolbarGroup-Insert,toolbarGroup-Forms,toolbarGroup-FillAndSign,insertPage,modalRedactButton,annotationRedactButton,richTextFormats,annotationGroupButton,annotationUngroupButton,multiGroupButton,multiUngroupButton"; +"linkButton,stickyToolButton,highlightToolButton,freeHandToolButton,freeHandHighlightToolButton,freeTextToolButton,markInsertTextToolButton,markReplaceTextToolButton,textSquigglyToolButton,textStrikeoutToolButton,textRedactToolButton,textUnderlineToolButton,textHighlightToolButton,markReplaceTextGroupButton,markInsertTextGroupButton,strikeoutToolGroupButton,squigglyToolGroupButton,underlineToolGroupButton,highlightToolGroupButton,toolbarGroup-Edit,toolbarGroup-Insert,toolbarGroup-Forms,toolbarGroup-FillAndSign,insertPage,modalRedactButton,annotationRedactButton,richTextFormats,annotationGroupButton,annotationUngroupButton,multiGroupButton,multiUngroupButton,redactionPanel,redactionPanelToggle"; export const ANNOTATION_PAGE_SIZE = window._env_?.REACT_APP_ANNOTATION_PAGE_SIZE ?? process.env.REACT_APP_ANNOTATION_PAGE_SIZE ?? 500; export const PAGE_SELECT_LIMIT = window._env_?.REACT_APP_PAGE_SELECT_LIMIT ?? process.env.REACT_APP_PAGE_SELECT_LIMIT ?? 250; export const REDACTION_SELECT_LIMIT = window._env_?.REACT_APP_REDACTION_SELECT_LIMIT ?? process.env.REACT_APP_REDACTION_SELECT_LIMIT ?? 250; diff --git a/web/src/modules/documentReducer.ts b/web/src/modules/documentReducer.ts index 668255090..be5bde223 100644 --- a/web/src/modules/documentReducer.ts +++ b/web/src/modules/documentReducer.ts @@ -39,6 +39,8 @@ const documents = (state = initialState, action:any)=> { let layer: any = state.redactionLayers.find((l: any) => l.redactionlayerid === action.payload); layer.count++; return {...state, redactionLayers: state.redactionLayers }; + case ACTION_CONSTANTS.SET_DELETED_PAGES: + return {...state, deletedDocPages: action.payload}; default: return state; }