diff --git a/.gitignore b/.gitignore index 584e119ce..976b15b5e 100644 --- a/.gitignore +++ b/.gitignore @@ -111,3 +111,6 @@ datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/FOIMOD.CFD.DocMigration.AXIS.D datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/FOIMOD.CFD.DocMigration.AXIS.DAL.UnitTests/Debug/* datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/FOIMOD.CFD.DocMigration.FOIFLOW.DAL.Tests/obj/* datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/FOIMOD.CFD.ConsoleApp.DocMigration/appsettings.dev.json +datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/*/obj/* +datamigrations/FOIMOD.CFD.ConsoleApp.DocMigration/*/bin/* + diff --git a/docker-compose.yml b/docker-compose.yml index 77423470b..3bcafd591 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -110,6 +110,10 @@ services: - EVENT_QUEUE_PDFSTITCH_LARGE_FILE_STREAMKEY=${EVENT_QUEUE_PDFSTITCH_LARGE_FILE_STREAMKEY} - STREAM_SEPARATION_FILE_SIZE_LIMIT=${STREAM_SEPARATION_FILE_SIZE_LIMIT} - MUTE_NOTIFICATION=${MUTE_NOTIFICATION} + - UNOPENED_REPORT_CUTOFF_DAYS=${UNOPENED_REPORT_CUTOFF_DAYS} + - UNOPENED_REPORT_WAIT_DAYS=${UNOPENED_REPORT_WAIT_DAYS} + - UNOPENED_REPORT_JARO_CUTOFF=${UNOPENED_REPORT_JARO_CUTOFF} + - UNOPENED_REPORT_EMAIL_RECIPIENT=${UNOPENED_REPORT_EMAIL_RECIPIENT} #- LOG_ROOT=${LOG_ROOT} #- LOG_BASIC=${LOG_BASIC} #- LOG_TRACING=${LOG_TRACING} diff --git a/forms-flow-web/src/components/FOI/customComponents/Records/util.js b/forms-flow-web/src/components/FOI/customComponents/Records/util.js index e6ba9615f..44b2d1a33 100644 --- a/forms-flow-web/src/components/FOI/customComponents/Records/util.js +++ b/forms-flow-web/src/components/FOI/customComponents/Records/util.js @@ -4,14 +4,24 @@ export const removeDuplicateFiles = (recordList) => // Helper function to sort files by lastmodified date export const sortByLastModified = (files) => - files.sort((a, b) => new Date(a.lastmodified) - new Date(b.lastmodified)); + files.sort((a, b) => { + let sort = new Date(a.lastmodified) - new Date(b.lastmodified); + if(sort === 0) { + return a.filename.toLowerCase().localeCompare(b.filename.toLowerCase()); + } + return sort; + }); // Helper function to sort attachments by lastmodified date const sortAttachmentsByLastModified = (attachments) => attachments.sort( - (a, b) => - new Date(a?.attributes?.lastmodified) - - new Date(b?.attributes?.lastmodified) + (a, b) => { + let sort = new Date(a?.attributes?.lastmodified) - new Date(b?.attributes?.lastmodified) + if(sort === 0) { + return a.filename.toLowerCase().localeCompare(b.filename.toLowerCase()); + } + return sort; + } ); export const getPDFFilePath = (item) => { @@ -55,7 +65,6 @@ function arrangeAttachments(attachments, parentDocumentMasterId) { // Start arranging attachments from the root level arrangeChildren(parentDocumentMasterId); - getUpdatedRecords(arrangedAttachments, true); return getUpdatedRecords(arrangedAttachments, true); } diff --git a/request-management-api/migrations/versions/932d6dae5570_.py b/request-management-api/migrations/versions/932d6dae5570_.py new file mode 100644 index 000000000..426eef4f9 --- /dev/null +++ b/request-management-api/migrations/versions/932d6dae5570_.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: 932d6dae5570 +Revises: b4da31675bd0 +Create Date: 2024-02-05 14:00:56.263099 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = '932d6dae5570'
+down_revision = 'd42a1cf67c5c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.execute('insert into public."DocumentTypes"(document_type_name, description) VALUES (\'redline_redaction_summary\', \'Word template for redline redaction summary pdf\');commit;')
+    op.execute('insert into public."DocumentTypes"(document_type_name, description) VALUES (\'responsepackage_redaction_summary\', \'Word template for response package redaction summary pdf\');commit;')
+
+    op.execute('insert into public."DocumentTemplates"(extension, document_type_id) VALUES (\'docx\', (select document_type_id from public."DocumentTypes" where document_type_name=\'redline_redaction_summary\'));commit;')
+    op.execute('insert into public."DocumentTemplates"(extension, document_type_id) VALUES (\'docx\', (select document_type_id from public."DocumentTypes" where document_type_name=\'responsepackage_redaction_summary\'));commit;')
+
+def downgrade():
+    op.execute('delete from public."DocumentTemplates" where document_type_id = (select document_type_id from public."DocumentTypes" where document_type_name=\'redline_redaction_summary\')')
+    op.execute('delete from public."DocumentTemplates" where document_type_id = (select document_type_id from public."DocumentTypes" where document_type_name=\'responsepackage_redaction_summary\')')
+
+    op.execute('delete from public."DocumentTypes" where document_type_name = \'redline_redaction_summary\'')
+    op.execute('delete from public."DocumentTypes" where document_type_name = \'responsepackage_redaction_summary\'')
+
diff --git a/request-management-api/migrations/versions/d42a1cf67c5c_.py b/request-management-api/migrations/versions/d42a1cf67c5c_.py
new file mode 100644
index 000000000..4a61359be
--- /dev/null
+++ b/request-management-api/migrations/versions/d42a1cf67c5c_.py
@@ -0,0 +1,35 @@
+"""empty message
+
+Revision ID: d42a1cf67c5c
+Revises: b4da31675bd0
+Create Date: 2024-02-08 12:40:33.968711
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = 'd42a1cf67c5c'
+down_revision = 'b4da31675bd0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('UnopenedReport',
+    sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True, nullable=False),
+    sa.Column('rawrequestid', sa.Integer(), nullable=False),
+    sa.Column('date', sa.DateTime(), nullable=True),
+    sa.Column('rank', sa.Integer(), nullable=False),
+    sa.Column('potentialmatches', postgresql.JSON(astext_type=sa.Text()), nullable=True)
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust!
### + op.execute('DROP TABLE IF EXISTS public."UnopenedReport";') + + # ### end Alembic commands ### diff --git a/request-management-api/request_api/models/FOIMinistryRequests.py b/request-management-api/request_api/models/FOIMinistryRequests.py index 9f2dd5e42..71660a41f 100644 --- a/request-management-api/request_api/models/FOIMinistryRequests.py +++ b/request-management-api/request_api/models/FOIMinistryRequests.py @@ -9,7 +9,6 @@ from sqlalchemy import or_, and_, text, func, literal, cast, case, nullslast, nullsfirst, desc, asc from sqlalchemy.sql.sqltypes import String from sqlalchemy.dialects.postgresql import JSON - from .FOIRequestApplicantMappings import FOIRequestApplicantMapping from .FOIRequestApplicants import FOIRequestApplicant from .FOIRequestStatus import FOIRequestStatus @@ -584,7 +583,8 @@ def getrequestssubquery(cls, groups, filterfields, keyword, additionalfilter, us *joincondition_oipc ), isouter=True - ).filter(or_(FOIMinistryRequest.requeststatuslabel != StateName.closed.name, and_(FOIMinistryRequest.isoipcreview == True, FOIMinistryRequest.requeststatusid == 3, subquery_with_oipc.c.outcomeid == None))) + ).filter(or_(FOIMinistryRequest.requeststatuslabel != StateName.closed.name, + and_(FOIMinistryRequest.isoipcreview == True, FOIMinistryRequest.requeststatusid == 3, subquery_with_oipc.c.outcomeid == None))) if(additionalfilter == 'watchingRequests'): #watchby @@ -706,42 +706,35 @@ def getgroupfilters(cls, groups): ministryfilter = FOIMinistryRequest.isactive == True else: groupfilter = [] - for group in groups: - if (group == IAOTeamWithKeycloackGroup.flex.value or group in ProcessingTeamWithKeycloackGroup.list()): - groupfilter.append( - and_( - FOIMinistryRequest.assignedgroup == group - ) - ) - elif (group == IAOTeamWithKeycloackGroup.intake.value): - groupfilter.append( - or_( - FOIMinistryRequest.assignedgroup == group, + statusfilter = None + processinggroups = list(set(groups).intersection(ProcessingTeamWithKeycloackGroup.list())) + if IAOTeamWithKeycloackGroup.intake.value in groups or len(processinggroups) > 0: + groupfilter.append( and_( - FOIMinistryRequest.assignedgroup == IAOTeamWithKeycloackGroup.flex.value, - FOIMinistryRequest.requeststatuslabel.in_([StateName.open.name]) + FOIMinistryRequest.assignedgroup.in_(tuple(groups)) ) ) - ) - else: - groupfilter.append( + statusfilter = FOIMinistryRequest.requeststatuslabel != StateName.closed.name + else: + groupfilter.append( or_( - FOIMinistryRequest.assignedgroup == group, + FOIMinistryRequest.assignedgroup.in_(tuple(groups)), and_( - FOIMinistryRequest.assignedministrygroup == group, - FOIMinistryRequest.requeststatuslabel.in_([StateName.callforrecords.name,StateName.recordsreview.name,StateName.feeestimate.name,StateName.consult.name,StateName.ministrysignoff.name,StateName.onhold.name,StateName.deduplication.name,StateName.harmsassessment.name,StateName.response.name,StateName.peerreview.name,StateName.tagging.name,StateName.readytoscan.name]) + FOIMinistryRequest.assignedministrygroup.in_(tuple(groups)) ) ) ) - + statusfilter = FOIMinistryRequest.requeststatuslabel.in_([StateName.callforrecords.name,StateName.recordsreview.name,StateName.feeestimate.name,StateName.consult.name,StateName.ministrysignoff.name,StateName.onhold.name,StateName.deduplication.name,StateName.harmsassessment.name,StateName.response.name,StateName.peerreview.name,StateName.tagging.name,StateName.readytoscan.name]) ministryfilter = and_( FOIMinistryRequest.isactive == True, FOIRequestStatus.isactive == True, or_(*groupfilter) 
) - - ministryfilterwithclosedoipc = or_(ministryfilter, and_(FOIMinistryRequest.isoipcreview == True, FOIMinistryRequest.requeststatuslabel == StateName.closed.name)) - + ministryfilterwithclosedoipc = and_(ministryfilter, + or_(statusfilter, + and_(FOIMinistryRequest.isoipcreview == True, FOIMinistryRequest.requeststatuslabel == StateName.closed.name) + ) + ) return ministryfilterwithclosedoipc @classmethod diff --git a/request-management-api/request_api/models/FOIRawRequests.py b/request-management-api/request_api/models/FOIRawRequests.py index 238fb178e..0c540ea30 100644 --- a/request-management-api/request_api/models/FOIRawRequests.py +++ b/request-management-api/request_api/models/FOIRawRequests.py @@ -1076,6 +1076,82 @@ def getlatestsection5pendings(cls): finally: db.session.close() return section5pendings + + @classmethod + def getunopenedunactionedrequests(cls, startdate, enddate): + try: + requests = [] + sql = '''select rr.created_at, rr.requestrawdata, rr.requestid, coalesce(p.status, '') as status, + coalesce(p.transaction_number, '') as txnno, + coalesce(p.total::text, '') as fee + from public."FOIRawRequests" rr + join ( + select max(version) as version, requestid from public."FOIRawRequests" + group by requestid + ) mv on mv.requestid = rr.requestid and mv.version = rr.version + left join ( + select request_id, max(payment_id) from public."Payments" + where fee_code_id = 1 + group by request_id + order by request_id + ) mp on mp.request_id = rr.requestid + left join public."Payments" p on p.payment_id = mp.max + where rr.status = 'Unopened' and rr.version = 1 and created_at > :startdate and created_at < :enddate + order by rr.requestid ''' + rs = db.session.execute(text(sql), {'startdate': startdate, 'enddate': enddate}) + for row in rs: + requests.append({ + "requestid": row["requestid"], + "created_at": row["created_at"], + "requestrawdata": row["requestrawdata"], + "paymentstatus": row["status"], + "fee": row["fee"], + "txnno": row["txnno"] + }) + except Exception as ex: + logging.error(ex) + raise ex + finally: + db.session.close() + return requests + + @classmethod + def getpotentialactionedmatches(cls, request): + try: + requests = [] + sql = '''select rr.created_at, rr.* from public."FOIRawRequests" rr + join ( + select max(version) as version, requestid from public."FOIRawRequests" + group by requestid + ) mv on mv.requestid = rr.requestid and mv.version = rr.version + where status = 'Intake in Progress' and ( + requestrawdata->>'lastName' ilike :lastName + or requestrawdata->>'firstName' ilike :firstName + or requestrawdata->>'email' ilike :email + or requestrawdata->>'address' ilike :address + or requestrawdata->>'phonePrimary' ilike :phonePrimary + or requestrawdata->>'postal' ilike :postal + ) and substring(rr.requestrawdata->>'receivedDateUF', 1, 10) = :receiveddate + order by requestid desc, version desc ''' + rs = db.session.execute(text(sql), { + 'lastName': request['requestrawdata']['contactInfo']['lastName'], + 'firstName': request['requestrawdata']['contactInfo']['firstName'], + 'email': request['requestrawdata']['contactInfoOptions']['email'], + 'address': request['requestrawdata']['contactInfoOptions']['address'], + 'phonePrimary': request['requestrawdata']['contactInfoOptions']['phonePrimary'], + 'postal': request['requestrawdata']['contactInfoOptions']['postal'], + 'receiveddate': request['requestrawdata']['receivedDateUF'][0:10], + }) + for row in rs: + requests.append({"requestid": row["requestid"], "created_at": row["created_at"], "requestrawdata": 
row["requestrawdata"]}) + except Exception as ex: + logging.error(ex) + raise ex + finally: + db.session.close() + return requests + + class FOIRawRequestSchema(ma.Schema): class Meta: diff --git a/request-management-api/request_api/models/UnopenedReport.py b/request-management-api/request_api/models/UnopenedReport.py new file mode 100644 index 000000000..dd8a75f43 --- /dev/null +++ b/request-management-api/request_api/models/UnopenedReport.py @@ -0,0 +1,22 @@ +from .db import db, ma +from .default_method_result import DefaultMethodResult +from sqlalchemy.orm import relationship,backref +from datetime import datetime +from sqlalchemy import text +from sqlalchemy.dialects.postgresql import JSON +import json + +class UnopenedReport(db.Model): + __tablename__ = 'UnopenedReport' + # Defining the columns + id = db.Column(db.Integer, primary_key=True,autoincrement=True) + rawrequestid = db.Column(db.Text, unique=False, nullable=False) + date = db.Column(db.Text, unique=False, nullable=False) + rank = db.Column(db.Text, unique=False, nullable=False) + potentialmatches = db.Column(JSON, unique=False, nullable=False) + + @classmethod + def insert(cls, row): + db.session.add(row) + db.session.commit() + return DefaultMethodResult(True,'Report Row added',row.rawrequestid) \ No newline at end of file diff --git a/request-management-api/request_api/resources/request.py b/request-management-api/request_api/resources/request.py index fcec8a2e5..335a4fac8 100644 --- a/request-management-api/request_api/resources/request.py +++ b/request-management-api/request_api/resources/request.py @@ -25,6 +25,7 @@ from request_api.services.rawrequestservice import rawrequestservice from request_api.services.documentservice import documentservice from request_api.services.eventservice import eventservice +from request_api.services.unopenedreportservice import unopenedreportservice from request_api.utils.enums import StateName import json import asyncio @@ -323,4 +324,22 @@ def post(requestid=None): except ValueError: return {'status': 500, 'message':"Invalid Request"}, 400 except BusinessException as exception: - return {'status': exception.status_code, 'message':exception.message}, 500 \ No newline at end of file + return {'status': exception.status_code, 'message':exception.message}, 500 + +@cors_preflight('POST,OPTIONS') +@API.route('/foirawrequest/unopenedreport') +class FOIRawRequestReport(Resource): + """Generates report of unopened requests that have not been actioned in over X amount of days""" + + + @staticmethod + @TRACER.trace() + @cross_origin(origins=allowedorigins()) + @auth.require + def post(): + try: + result = unopenedreportservice().generateunopenedreport() + # responsecode = 200 if result.success == True else 500 + return {'status': True, 'message': result} , 200 + except BusinessException as exception: + return {'status': exception.status_code, 'message':exception.message}, 500 \ No newline at end of file diff --git a/request-management-api/request_api/services/email/senderservice.py b/request-management-api/request_api/services/email/senderservice.py index d15069ca7..4c5034e4a 100644 --- a/request-management-api/request_api/services/email/senderservice.py +++ b/request-management-api/request_api/services/email/senderservice.py @@ -30,12 +30,12 @@ class senderservice: """ - def send(self, servicekey, content, _messageattachmentlist, requestjson): + def send(self, subject, content, _messageattachmentlist, requestjson): logging.debug("Begin: Send email for request = "+json.dumps(requestjson)) msg = 
MIMEMultipart() msg['From'] = MAIL_FROM_ADDRESS msg['To'] = requestjson["email"] - msg['Subject'] = templateconfig().getsubject(servicekey, requestjson) + msg['Subject'] = subject part = MIMEText(content, "html") msg.attach(part) # Add Attachment and Set mail headers diff --git a/request-management-api/request_api/services/emailservice.py b/request-management-api/request_api/services/emailservice.py index 2d16c7c35..6df55855c 100644 --- a/request-management-api/request_api/services/emailservice.py +++ b/request-management-api/request_api/services/emailservice.py @@ -34,7 +34,8 @@ def send(self, servicename, requestid, ministryrequestid, emailschema): servicename = _templatename.upper() if _templatename else "" _messageattachmentlist = self.__get_attachments(ministryrequestid, emailschema, servicename) self.__pre_send_correspondence_audit(requestid, ministryrequestid,emailschema, content, templateconfig().isnotreceipt(servicename), _messageattachmentlist) - return senderservice().send(servicename, _messagepart, _messageattachmentlist, requestjson) + subject = templateconfig().getsubject(servicename, requestjson) + return senderservice().send(subject, _messagepart, _messageattachmentlist, requestjson) except Exception as ex: logging.exception(ex) diff --git a/request-management-api/request_api/services/unopenedreportservice.py b/request-management-api/request_api/services/unopenedreportservice.py new file mode 100644 index 000000000..2fbaddad7 --- /dev/null +++ b/request-management-api/request_api/services/unopenedreportservice.py @@ -0,0 +1,150 @@ + +from request_api.models.FOIRawRequests import FOIRawRequest +from request_api.models.UnopenedReport import UnopenedReport +from request_api.services.email.senderservice import senderservice +from os import getenv +from datetime import timedelta, date +from jaro import jaro_winkler_metric +import json +import logging +from math import inf + +class unopenedreportservice: + """ + This service generates a report of unopened unactioned requests + + """ + + dayscutoff = getenv('UNOPENED_REPORT_CUTOFF_DAYS', 10) + waitdays = getenv('UNOPENED_REPORT_WAIT_DAYS', 5) + jarocutoff = getenv('UNOPENED_REPORT_JARO_CUTOFF', 0.8) + reportemail = getenv('UNOPENED_REPORT_EMAIL_RECIPIENT') + + + def generateunopenedreport(self): + startdate = date.today() - timedelta(days=int(self.dayscutoff)) + enddate = date.today() - timedelta(days=int(self.waitdays)) + requests = FOIRawRequest.getunopenedunactionedrequests(str(startdate), str(enddate)) + alerts = [] + for request in requests: + potentialmatches = FOIRawRequest.getpotentialactionedmatches(request) + if len(potentialmatches) == 0: + alert = UnopenedReport() + alert.rawrequestid = request['requestid'] + alert.date = date.today() + alert.rank = 1 + UnopenedReport.insert(alert) + alerts.append({"request": request, "rank": 1}) + else: + highscore = 0 + for match in potentialmatches: + match['score'] = jaro_winkler_metric( + request['requestrawdata']['descriptionTimeframe']['description'].replace('\n', ' ').replace('\t', ' '), + match['requestrawdata']['description'] + ) + if match['score'] > highscore: + highscore = match['score'] + alert = UnopenedReport() + alert.rawrequestid = request['requestid'] + alert.date = date.today() + alert.rank = 2 + alert.potentialmatches = {"highscore": round(highscore, 2), "matches": [{ + "requestid": m["requestrawdata"]['axisRequestId'], + "similarity": round(m['score'], 2) + } for m in potentialmatches]} + UnopenedReport.insert(alert) + alerts.append({"request": request, "rank": 2, 
"potentialmatches": alert.potentialmatches}) + alerts.sort(key=lambda a : a.get('potentialmatches', {'highscore': 0})['highscore']) + senderservice().send( + subject="Intake Unopened Request Report: " + str(date.today()), + content=self.generateemailhtml(alerts), + _messageattachmentlist=[], + requestjson={"email": self.reportemail, "topic": "Unopened Report"} + ) + return alerts + + + def generateemailhtml(self, alerts): + emailhtml = """ +
+        <p>This is a report for unopened requests in the past """ + str(self.dayscutoff) + """ days that have not yet been actioned.</p>
+        <p>Rank 1: Very likely to be unactioned - unable to find a request with any matching applicant info</p>
+        <table>
+            <tr>
+                <th>Unopened ID</th>
+                <th>Date Received</th>
+                <th>Ministry Selected</th>
+                <th>Applicant First Name</th>
+                <th>Applicant Last Name</th>
+                <th>Payment Status</th>
+                <th>Receipt Number</th>
+                <th>Application Fee</th>
+                <th>Description</th>
+            </tr>"""
+        for alert in alerts:
+            if alert['rank'] == 1:
+                emailhtml += '''<tr>
+                    <td>U-000''' + str(alert['request']['requestid']) + '''</td>
+                    <td>''' + alert['request']['requestrawdata']['receivedDate'] + '''</td>
+                    <td>'''
+                for m in alert['request']['requestrawdata']['ministry']['selectedMinistry']:
+                    emailhtml += (m['code'] + ' ')
+                emailhtml += '''</td>
+                    <td>''' + alert['request']['requestrawdata']['contactInfo']['firstName'] + '''</td>
+                    <td>''' + alert['request']['requestrawdata']['contactInfo']['lastName'] + '''</td>
+                    <td>''' + alert['request']['paymentstatus'] + '''</td>
+                    <td>''' + alert['request']['txnno'] + '''</td>
+                    <td>''' + alert['request']['fee'] + '''</td>
+                    <td>''' + alert['request']['requestrawdata']['descriptionTimeframe']['description'][0:99] + '''...</td>
+                </tr>'''
+        emailhtml += """</table>
+        <p>Rank 2: Possibly unactioned - requests found but some applicant info is mismatching - please double check</p>
+        <table>
+            <tr>
+                <th>Unopened ID</th>
+                <th>Date Received</th>
+                <th>Ministry Selected</th>
+                <th>Applicant First Name</th>
+                <th>Applicant Last Name</th>
+                <th>Payment Status</th>
+                <th>Receipt Number</th>
+                <th>Application Fee</th>
+                <th>Potential Matches</th>
+                <th>Description</th>
+            </tr>"""
+        for alert in alerts:
+            if alert['rank'] == 2:
+                emailhtml += '''<tr>
+                    <td>U-000''' + str(alert['request']['requestid']) + '''</td>
+                    <td>''' + alert['request']['requestrawdata']['receivedDate'] + '''</td>
+                    <td>'''
+                for m in alert['request']['requestrawdata']['ministry']['selectedMinistry']:
+                    emailhtml += (m['code'] + ' ')
+                emailhtml += '''</td>
+                    <td>''' + alert['request']['requestrawdata']['contactInfo']['firstName'] + '''</td>
+                    <td>''' + alert['request']['requestrawdata']['contactInfo']['lastName'] + '''</td>
+                    <td>''' + alert['request']['paymentstatus'] + '''</td>
+                    <td>''' + alert['request']['txnno'] + '''</td>
+                    <td>''' + alert['request']['fee'] + '''</td>
+                    <td>'''
+                for m in alert['potentialmatches']['matches']:
+                    emailhtml += (m['requestid'] + " - similarity: " + str(m['similarity']*100) + "%<br>")
+                emailhtml = emailhtml[:-4]
+                emailhtml += '''</td>
+                    <td>''' + alert['request']['requestrawdata']['descriptionTimeframe']['description'][0:99] + '''...</td>
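For context on the ranking used above: the service scores the unopened request's description against each potential "Intake in Progress" match with jaro_winkler_metric, the same function imported from the jaro package earlier in this diff. The sketch below is illustrative only, with made-up descriptions and a hypothetical helper name; it is not code from this PR.

# Illustrative sketch, not PR code: rank 1 means no in-progress request shared any
# applicant info with the unopened request; rank 2 means candidates were found and
# are reported with their Jaro-Winkler description similarity. The service also
# defines an UNOPENED_REPORT_JARO_CUTOFF env var (default 0.8) alongside these settings.
from jaro import jaro_winkler_metric

def rank_unopened(description, candidate_descriptions):
    # No candidates at all: very likely unactioned (rank 1).
    if not candidate_descriptions:
        return 1, []
    # Candidates exist: score each description pair, rounded like the service does.
    scores = [round(jaro_winkler_metric(description, c), 2) for c in candidate_descriptions]
    return 2, scores

rank, scores = rank_unopened(
    "Copies of all park permit records issued in 2023",
    ["Copies of park permit records issued in 2023", "An unrelated request"],
)
print(rank, scores)  # rank 2, with one high and one low similarity score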