diff --git a/app/jobs/bgs_share_error_fix_job.rb b/app/jobs/bgs_share_error_fix_job.rb new file mode 100644 index 00000000000..9fccbd6f9ec --- /dev/null +++ b/app/jobs/bgs_share_error_fix_job.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +class BgsShareErrorFixJob < CaseflowJob + ERROR_TEXT = "ShareError" + STUCK_JOB_REPORT_SERVICE = StuckJobReportService.new + + def perform + clear_hlr_errors if hlrs_with_errors.present? + clear_rius_errors if rius_with_errors.present? + clear_bge_errors if bges_with_errors.present? + STUCK_JOB_REPORT_SERVICE.write_log_report(ERROR_TEXT) + end + + def clear_rius_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(rius_with_errors.count, ERROR_TEXT) + rius_with_errors.each do |riu| + epe = EndProductEstablishment.find_by( + id: riu.review_id + ) + next if epe.established_at.blank? + + resolve_error_on_records(riu) + STUCK_JOB_REPORT_SERVICE.append_single_record(riu.class.name, riu.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(rius_with_errors.count, ERROR_TEXT) + end + + def clear_hlr_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + + hlrs_with_errors.each do |hlr| + epe = EndProductEstablishment.find_by( + veteran_file_number: hlr.veteran_file_number + ) + next if epe.established_at.blank? + + resolve_error_on_records(hlr) + STUCK_JOB_REPORT_SERVICE.append_single_record(hlr.class.name, hlr.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + end + + def clear_bge_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(bges_with_errors.count, ERROR_TEXT) + + bges_with_errors.each do |bge| + next if bge.end_product_establishment.established_at.blank? 
+ + resolve_error_on_records(bge) + STUCK_JOB_REPORT_SERVICE.append_single_record(bge.class.name, bge.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(bges_with_errors.count, ERROR_TEXT) + end + + def hlrs_with_errors + HigherLevelReview.where("establishment_error ILIKE?", "%#{ERROR_TEXT}%") + end + + def rius_with_errors + RequestIssuesUpdate.where("error ILIKE?", "%#{ERROR_TEXT}%") + end + + def bges_with_errors + BoardGrantEffectuation.where("decision_sync_error ILIKE?", "%#{ERROR_TEXT}%") + end + + private + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type) + ActiveRecord::Base.transaction do + object_type.clear_error! + rescue StandardError => error + log_error(error) + STUCK_JOB_REPORT_SERVICE.append_error(object_type.class.name, object_type.id, error) + end + end +end diff --git a/app/jobs/cannot_delete_contention_remediation_job.rb b/app/jobs/cannot_delete_contention_remediation_job.rb index c1547b1d08a..ef6b83cd07c 100644 --- a/app/jobs/cannot_delete_contention_remediation_job.rb +++ b/app/jobs/cannot_delete_contention_remediation_job.rb @@ -6,9 +6,13 @@ class CannotDeleteContentionRemediationJob < CaseflowJob queue_with_priority :low_priority + # Sub folder name + S3_FOLDER_NAME = "data-remediation-output" + def initialize @logs = ["\nVBMS::CannotDeleteContention Remediation Log"] @remediated_request_issues_update_ids = [] + @folder_name = (Rails.deploy_env == :prod) ? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" super end @@ -166,28 +170,10 @@ def sync_epe!(request_issues_update, request_issue, index) " Resetting EPE synced_status to null. 
Syncing Epe with EP.") end - # Save Logs to S3 Bucket def store_logs_in_s3_bucket - # Set Client Resources for AWS - Aws.config.update(region: "us-gov-west-1") - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("data-remediation-output") - # Folder and File name - file_name = "cannot-delete-contention-remediation-logs/cdc-remediation-log-#{Time.zone.now}" - - # Store contents of logs array in a temporary file content = @logs.join("\n") - temporary_file = Tempfile.new("cdc-log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - # Store File in S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") - - # Delete Temporary File - temporary_file.close! + file_name = "cannot-delete-contention-remediation-logs/cdc-remediation-log-#{Time.zone.now}" + S3Service.store_file("#{@folder_name}/#{file_name}", content) end end diff --git a/app/jobs/claim_date_dt_fix_job.rb b/app/jobs/claim_date_dt_fix_job.rb new file mode 100644 index 00000000000..fbbffe6cc31 --- /dev/null +++ b/app/jobs/claim_date_dt_fix_job.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +class ClaimDateDtFixJob < CaseflowJob + ERROR_TEXT = "ClaimDateDt" + + attr_reader :stuck_job_report_service + + def initialize + @stuck_job_report_service = StuckJobReportService.new + end + + def perform + process_decision_documents + end + + def process_decision_documents + return if decision_docs_with_errors.blank? 
+ + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + decision_docs_with_errors.each do |single_decision_document| + next unless valid_decision_document?(single_decision_document) + + process_decision_document(single_decision_document) + end + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def valid_decision_document?(decision_document) + decision_document.processed_at.present? && + decision_document.uploaded_to_vbms_at.present? + end + + # :reek:FeatureEnvy + def process_decision_document(decision_document) + ActiveRecord::Base.transaction do + decision_document.clear_error! + rescue StandardError => error + log_error(error) + stuck_job_report_service.append_error(decision_document.class.name, decision_document.id, error) + end + end + + def decision_docs_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/claim_not_established_fix_job.rb b/app/jobs/claim_not_established_fix_job.rb new file mode 100644 index 00000000000..dd7b7ebb76b --- /dev/null +++ b/app/jobs/claim_not_established_fix_job.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +class ClaimNotEstablishedFixJob < CaseflowJob + ERROR_TEXT = "Claim not established." + EPECODES = %w[030 040 930 682].freeze + + attr_reader :stuck_job_report_service + + def initialize + @stuck_job_report_service = StuckJobReportService.new + end + + def perform + return if decision_docs_with_errors.blank? 
+ + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + decision_docs_with_errors.each do |single_decision_document| + file_number = single_decision_document.veteran.file_number + epe_array = EndProductEstablishment.where(veteran_file_number: file_number) + validated_epes = epe_array.map { |epe| validate_epe(epe) } + + stuck_job_report_service.append_single_record(single_decision_document.class.name, single_decision_document.id) + + resolve_error_on_records(single_decision_document, validated_epes) + end + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def decision_docs_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERROR_TEXT}%") + end + + def validate_epe(epe) + epe_code = epe&.code&.slice(0, 3) + EPECODES.include?(epe_code) && epe&.established_at.present? + end + + private + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type, epes_array) + ActiveRecord::Base.transaction do + if !epes_array.include?(false) + object_type.clear_error! + end + rescue StandardError => error + log_error(error) + stuck_job_report_service.append_error(object_type.class.name, object_type.id, error) + end + end +end diff --git a/app/jobs/contention_not_found_remediation_job.rb b/app/jobs/contention_not_found_remediation_job.rb index 9728f871f17..62a90376834 100644 --- a/app/jobs/contention_not_found_remediation_job.rb +++ b/app/jobs/contention_not_found_remediation_job.rb @@ -6,9 +6,12 @@ class ContentionNotFoundRemediationJob < CaseflowJob queue_with_priority :low_priority + S3_FOLDER_NAME = "data-remediation-output" + def initialize @logs = ["\nVBMS::ContentionNotFound Remediation Log"] @remediated_request_issues_update_ids = [] + @folder_name = (Rails.deploy_env == :prod) ? 
S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" super end @@ -141,26 +144,8 @@ def sync_epe!(request_issues_update, request_issue, index) # Save Logs to S3 Bucket def store_logs_in_s3_bucket - # Set Client Resources for AWS - Aws.config.update(region: "us-gov-west-1") - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("data-remediation-output") - - # Folder and File name - file_name = "contention-not-found-remediation-logs/cnf-remediation-log-#{Time.zone.now}" - - # Store contents of logs array in a temporary file content = @logs.join("\n") - temporary_file = Tempfile.new("cnf-log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - # Store File in S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") - - # Delete Temporary File - temporary_file.close! + file_name = "contention-not-found-remediation-logs/cnf-remediation-log-#{Time.zone.now}" + S3Service.store_file("#{@folder_name}/#{file_name}", content) end end diff --git a/app/jobs/dta_sc_creation_failed_fix_job.rb b/app/jobs/dta_sc_creation_failed_fix_job.rb new file mode 100644 index 00000000000..8a6a077f6c7 --- /dev/null +++ b/app/jobs/dta_sc_creation_failed_fix_job.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +class DtaScCreationFailedFixJob < CaseflowJob + ERROR_TEXT = "DTA SC Creation Failed" + + # :reek:FeatureEnvy + def perform + stuck_job_report_service = StuckJobReportService.new + return if hlrs_with_errors.blank? + + stuck_job_report_service.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + + hlrs_with_errors.each do |hlr| + next unless SupplementalClaim.find_by( + decision_review_remanded_id: hlr.id, + decision_review_remanded_type: "HigherLevelReview" + ) + + stuck_job_report_service.append_single_record(hlr.class.name, hlr.id) + + ActiveRecord::Base.transaction do + hlr.clear_error! 
+ rescue StandardError => error + log_error(error) + stuck_job_report_service.append_error(hlr.class.name, hlr.id, error) + end + end + + stuck_job_report_service.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def hlrs_with_errors + HigherLevelReview.where("establishment_error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/duplicate_ep_remediation_job.rb b/app/jobs/duplicate_ep_remediation_job.rb index f1289becbb8..a1ac9570eda 100644 --- a/app/jobs/duplicate_ep_remediation_job.rb +++ b/app/jobs/duplicate_ep_remediation_job.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -class DuplicateEpRemediationJob < ApplicationJob +class DuplicateEpRemediationJob < CaseflowJob queue_with_priority :low_priority application_attr :intake def perform diff --git a/app/jobs/sc_dta_for_appeal_fix_job.rb b/app/jobs/sc_dta_for_appeal_fix_job.rb new file mode 100644 index 00000000000..2c969b343a9 --- /dev/null +++ b/app/jobs/sc_dta_for_appeal_fix_job.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +class ScDtaForAppealFixJob < CaseflowJob + ERRORTEXT = "Can't create a SC DTA for appeal" + + def records_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERRORTEXT}%") + end + + def sc_dta_for_appeal_fix + stuck_job_report_service = StuckJobReportService.new + return if records_with_errors.blank? + + # count of records with errors before fix + stuck_job_report_service.append_record_count(records_with_errors.count, ERRORTEXT) + + records_with_errors.each do |decision_doc| + claimant = decision_doc.appeal.claimant + + next unless claimant.payee_code.nil? 
+ + if claimant.type == "VeteranClaimant" + claimant.update!(payee_code: "00") + elsif claimant.type == "DependentClaimant" + claimant.update!(payee_code: "10") + end + stuck_job_report_service.append_single_record(decision_doc.class.name, decision_doc.id) + clear_error_on_record(decision_doc, stuck_job_report_service) + end + + # record count with errors after fix + stuck_job_report_service.append_record_count(records_with_errors.count, ERRORTEXT) + stuck_job_report_service.write_log_report(ERRORTEXT) + end + + # :reek:FeatureEnvy + def clear_error_on_record(decision_doc, stuck_job_report_service) + ActiveRecord::Base.transaction do + decision_doc.clear_error! + rescue StandardError => error + log_error(error) + stuck_job_report_service.append_error(decision_doc.class.name, decision_doc.id, error) + end + end +end diff --git a/app/services/stuck_job_report_service.rb b/app/services/stuck_job_report_service.rb new file mode 100644 index 00000000000..e6e03d28ab1 --- /dev/null +++ b/app/services/stuck_job_report_service.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +# StuckJobReportService is a generic shared class that creates the logs +# sent to S3. The logs give the count before the remediation and +# the count after the remediation. + +# The logs also contain the Id of the record that has been updated + +class StuckJobReportService + attr_reader :logs, :folder_name + + S3_FOLDER_NAME = "data-remediation-output" + + def initialize + @logs = ["#{Time.zone.now} ********** Remediation Log Report **********"] + @folder_name = (Rails.deploy_env == :prod) ? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" + end + + # Logs the Id and the object that is being updated + def append_single_record(class_name, id) + logs.push("\n#{Time.zone.now} Record Type: #{class_name} - Record ID: #{id}.") + end + + def append_error(class_name, id, error) + logs.push("\n#{Time.zone.now} Record Type: #{class_name}"\ + " - Record ID: #{id}. 
Encountered #{error}, record not updated.") + end + + # Gets the record count of the record type passed in. + def append_record_count(records_with_errors_count, text) + logs.push("\n#{Time.zone.now} #{text}::Log - Total number of Records with Errors: #{records_with_errors_count}") + end + + def write_log_report(report_text) + create_file_name = report_text.split.join("-").downcase + upload_logs(create_file_name) + end + + def upload_logs(create_file_name) + content = logs.join("\n") + file_name = "#{create_file_name}-logs/#{create_file_name}-log-#{Time.zone.now}" + S3Service.store_file("#{folder_name}/#{file_name}", content) + end +end diff --git a/lib/helpers/dupp_ep_claims_sync_status_update_can_clr.rb b/lib/helpers/dupp_ep_claims_sync_status_update_can_clr.rb index 5736d529e51..710d7fef629 100644 --- a/lib/helpers/dupp_ep_claims_sync_status_update_can_clr.rb +++ b/lib/helpers/dupp_ep_claims_sync_status_update_can_clr.rb @@ -11,37 +11,45 @@ module WarRoom class DuppEpClaimsSyncStatusUpdateCanClr + S3_FOLDER_NAME = "data-remediation-output" + REPORT_TEXT = "duplicate-ep-remediation" def initialize @logs = ["VBMS::DuplicateEP Remediation Log"] + @folder_name = (Rails.deploy_env == :prod) ? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" end def resolve_dup_ep - if retrieve_problem_reviews.count.zero? 
- Rails.logger.info("No records with errors found.") - return false - end + return unless retrieve_reviews_count >= 1 starting_record_count = retrieve_problem_reviews.count @logs.push("#{Time.zone.now} DuplicateEP::Log Job Started .") @logs.push("#{Time.zone.now} DuplicateEP::Log"\ " Records with errors: #{starting_record_count} .") - ActiveRecord::Base.transaction do - resolve_duplicate_end_products(retrieve_problem_reviews, starting_record_count) + resolve_or_throw_error(retrieve_problem_reviews, starting_record_count) + @logs.push("#{Time.zone.now} DuplicateEP::Log"\ + " Resolved records: #{resolved_record_count(starting_record_count, retrieve_problem_reviews.count)} .") + @logs.push("#{Time.zone.now} DuplicateEP::Log"\ + " Records with errors: #{retrieve_problem_reviews.count} .") + @logs.push("#{Time.zone.now} DuplicateEP::Log Job completed .") + Rails.logger.info(@logs) + end + + def resolve_or_throw_error(reviews, count) + ActiveRecord::Base.transaction do + resolve_duplicate_end_products(reviews, count) rescue StandardError => error @logs.push("An error occurred: #{error.message}") raise error end + end - final_count = retrieve_problem_reviews.count - - @logs.push("#{Time.zone.now} DuplicateEP::Log"\ - " Resolved records: #{resolved_record_count(starting_record_count, final_count)} .") - @logs.push("#{Time.zone.now} DuplicateEP::Log"\ - " Records with errors: #{retrieve_problem_reviews.count} .") - @logs.push("#{Time.zone.now} DuplicateEP::Log Job completed .") - Rails.logger.info(@logs) + def retrieve_reviews_count + if retrieve_problem_reviews.count.zero? 
+ Rails.logger.info("No records with errors found.") + end + retrieve_problem_reviews.count end # finding reviews that potentially need resolution @@ -88,7 +96,6 @@ def resolve_single_review(review_id, type) def resolve_duplicate_end_products(reviews, _starting_record_count) reviews.each do |review| vet = review.veteran - verb = "start" # get the end products from the veteran end_products = vet.end_products @@ -98,36 +105,43 @@ def resolve_duplicate_end_products(reviews, _starting_record_count) # Check if active duplicate exists next if active_duplicates(end_products, single_end_product_establishment).present? - verb = "established" - single_ep_update(single_end_product_establishment) + ep2e = single_end_product_establishment.send(:end_product_to_establish) + epmf = EndProductModifierFinder.new(single_end_product_establishment, vet) + taken = epmf.send(:taken_modifiers).compact + + log_start_retry(single_end_product_establishment, vet) + + # Mark place to start retrying + epmf.instance_variable_set(:@taken_modifiers, taken.push(ep2e.modifier)) + ep2e.modifier = epmf.find + single_end_product_establishment.instance_variable_set(:@end_product_to_establish, ep2e) + single_end_product_establishment.establish! + + log_complete(single_end_product_establishment, vet) end call_decision_review_process_job(review, vet) end end - def single_ep_update(single_end_product_establishment) - ep2e = single_end_product_establishment.send(:end_product_to_establish) - epmf = EndProductModifierFinder.new(single_end_product_establishment, vet) - taken = epmf.send(:taken_modifiers).compact - - @logs.push("#{Time.zone.now} DuplicateEP::Log"\ - " Veteran participant ID: #{vet.participant_id}."\ - " Review: #{review.class.name}. 
EPE ID: #{single_end_product_establishment.id}."\ -          " EP status: #{single_end_product_establishment.status_type_code}."\ -          " Status: Starting retry.") -      # Mark place to start retrying -      epmf.instance_variable_set(:@taken_modifiers, taken.push(ep2e.modifier)) -      ep2e.modifier = epmf.find -      single_end_product_establishment.instance_variable_set(:@end_product_to_establish, ep2e) -      single_end_product_establishment.establish! +    # :reek:FeatureEnvy +    def log_start_retry(end_product_establishment, veteran) +      @logs.push("#{Time.zone.now} DuplicateEP::Log "\ +        "Veteran participant ID: #{veteran.participant_id}. "\ +        "Review: #{end_product_establishment.class.name}. "\ +        "EPE ID: #{end_product_establishment.id}. "\ +        "EP status: #{end_product_establishment.status_type_code}. "\ +        "Status: Starting retry.") +    end -      @logs.push("#{Time.zone.now} DuplicateEP::Log"\ -        " Veteran participant ID: #{vet.participant_id}. Review: #{review.class.name}."\ -        " EPE ID: #{single_end_product_establishment.id}."\ -        " EP status: #{single_end_product_establishment.status_type_code}."\ -        " Status: Complete.") +    # :reek:FeatureEnvy +    def log_complete(end_product_establishment, veteran) +      @logs.push("#{Time.zone.now} DuplicateEP::Log "\ +        "Veteran participant ID: #{veteran.participant_id}. "\ +        "Review: #{end_product_establishment.class.name}. "\ +        "EPE ID: #{end_product_establishment.id}. "\ +        "EP status: #{end_product_establishment.status_type_code}. 
"\ + "Status: Complete.") end def resolved_record_count(starting_record_count, final_count) @@ -161,30 +175,18 @@ def call_decision_review_process_job(review, vet) @logs.push(" #{Time.zone.now} | Veteran participant ID: #{vet.participant_id}"\ " | Review: #{review.class.name} | Review ID: #{review.id} | status: Failed | Error: #{error}") else - create_log + create_log(REPORT_TEXT) end end - def create_log - content = @logs.join("\n") - temporary_file = Tempfile.new("cdc-log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - upload_logs_to_s3_bucket(filepath) - - temporary_file.close! + def create_log(report_text) + upload_logs_to_s3_bucket(report_text) end - def upload_logs_to_s3_bucket(filepath) - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("data-remediation-output") - file_name = "duplicate-ep-remediation-logs/duplicate-ep-remediation-log-#{Time.zone.now}" - - # Store file to S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") + def upload_logs_to_s3_bucket(create_file_name) + content = @logs.join("\n") + file_name = "#{create_file_name}-logs/#{create_file_name}-log-#{Time.zone.now}" + S3Service.store_file("#{@folder_name}/#{file_name}", content) end end end diff --git a/lib/helpers/remand_dta_or_doo_higher_level_review.rb b/lib/helpers/remand_dta_or_doo_higher_level_review.rb index 4e3d491fab8..c2f6d74e762 100644 --- a/lib/helpers/remand_dta_or_doo_higher_level_review.rb +++ b/lib/helpers/remand_dta_or_doo_higher_level_review.rb @@ -1,15 +1,19 @@ # frozen_string_literal: true module WarRoom - # Purpose: to find Higher Level Reviews with Duty to Assist (DTA) or Difference of Opinion (DOO) # decision issues and remand them to generate Supplemental Claims class RemandDtaOrDooHigherLevelReview + S3_FOLDER_NAME = "appeals-dbas" + + def initialize + @folder_name = (Rails.deploy_env == :prod) 
? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" + end # Currently, HLRs missing SCs are tracked in OAR report loads that are sent over and then # uploaded to the EP Establishment Workaround table # This method implements logic to remand SCs for a specified report load number - def run_by_report_load(report_load, env='prod') + def run_by_report_load(report_load, env = "prod") # Set the user RequestStore[:current_user] = User.system_user @@ -34,73 +38,72 @@ def run_by_report_load(report_load, env='prod') # Grab qualifying HLRs from the specified report load def get_hlrs(rep_load, conn) raw_sql = <<~SQL - WITH oar_list as (SELECT epw."reference_id" AS "reference_id", - epw."veteran_file_number" AS "veteran_file_number", - epw."synced_status" AS "synced_status", - epw."report_load" AS "report_load", - epe."source_id" AS "source_id", - epe."source_type" AS "source_type" - FROM "public"."ep_establishment_workaround" epw - LEFT JOIN "public"."end_product_establishments" epe - ON epw."reference_id" = epe."reference_id" - WHERE epe.source_type = 'HigherLevelReview' AND report_load = '#{rep_load}'), - no_ep_list as (SELECT distinct oar_list.* - FROM oar_list - LEFT JOIN "public"."request_issues" ri - ON (oar_list."source_id" = ri."decision_review_id" - AND oar_list."source_type" = ri."decision_review_type") - LEFT JOIN "public"."request_decision_issues" rdi - ON ri."id" = rdi."request_issue_id" - LEFT JOIN "public"."decision_issues" di - ON rdi."decision_issue_id" = di."id" - LEFT JOIN "public"."supplemental_claims" sc - ON (oar_list."source_id" = sc."decision_review_remanded_id" - AND oar_list."source_type" = sc."decision_review_remanded_type") - LEFT JOIN "public"."end_product_establishments" epe - ON sc."id" = epe."source_id" AND epe."source_type" = 'SupplementalClaim' - WHERE oar_list."synced_status" = 'CLR' - AND (di."disposition" = 'Difference of Opinion' - OR di."disposition" = 'DTA Error' - OR di."disposition" = 'DTA Error - Exam/MO' - OR di."disposition" 
= 'DTA Error - Fed Recs' - OR di."disposition" = 'DTA Error - Other Recs' - OR di."disposition" = 'DTA Error - PMRs') - AND (sc."decision_review_remanded_id" IS NULL - OR epe."source_id" IS NULL)), - no_040_ep as (SELECT * - FROM oar_list - intersect - SELECT * - FROM no_ep_list), - no_040_sync as (SELECT distinct reference_id, - COUNT(no_040_ep.reference_id) FILTER (WHERE report_load = '#{rep_load}') OVER (PARTITION BY no_040_ep.reference_id) as decision_issue_count, - COUNT(no_040_ep.reference_id) FILTER (WHERE report_load = '#{rep_load}' AND (decision_sync_processed_at IS NOT NULL OR closed_at IS NOT NULL)) OVER (PARTITION BY no_040_ep.reference_id) as synced_count - FROM no_040_ep - LEFT JOIN "public"."request_issues" ri - ON (no_040_ep."source_id" = ri."decision_review_id" - AND no_040_ep."source_type" = ri."decision_review_type")), - histogram_raw_data as (select no_040_ep.*, decision_issue_count, synced_count, - extc."CLAIM_ID" as vbms_claim_id, - extc."LIFECYCLE_STATUS_CHANGE_DATE" as vbms_closed_at, - DATE_PART('day', CURRENT_DATE - extc."LIFECYCLE_STATUS_CHANGE_DATE") as age_days - FROM no_040_ep - INNER JOIN no_040_sync ON no_040_ep.reference_id = no_040_sync.reference_id - left join vbms_ext_claim extc - on extc."CLAIM_ID" = no_040_ep.reference_id::numeric) - SELECT reference_id - FROM histogram_raw_data - WHERE decision_issue_count = synced_count + WITH oar_list as (SELECT epw."reference_id" AS "reference_id", + epw."veteran_file_number" AS "veteran_file_number", + epw."synced_status" AS "synced_status", + epw."report_load" AS "report_load", + epe."source_id" AS "source_id", + epe."source_type" AS "source_type" + FROM "public"."ep_establishment_workaround" epw + LEFT JOIN "public"."end_product_establishments" epe + ON epw."reference_id" = epe."reference_id" + WHERE epe.source_type = 'HigherLevelReview' AND report_load = '#{rep_load}'), + no_ep_list as (SELECT distinct oar_list.* + FROM oar_list + LEFT JOIN "public"."request_issues" ri + ON 
(oar_list."source_id" = ri."decision_review_id" + AND oar_list."source_type" = ri."decision_review_type") + LEFT JOIN "public"."request_decision_issues" rdi + ON ri."id" = rdi."request_issue_id" + LEFT JOIN "public"."decision_issues" di + ON rdi."decision_issue_id" = di."id" + LEFT JOIN "public"."supplemental_claims" sc + ON (oar_list."source_id" = sc."decision_review_remanded_id" + AND oar_list."source_type" = sc."decision_review_remanded_type") + LEFT JOIN "public"."end_product_establishments" epe + ON sc."id" = epe."source_id" AND epe."source_type" = 'SupplementalClaim' + WHERE oar_list."synced_status" = 'CLR' + AND (di."disposition" = 'Difference of Opinion' + OR di."disposition" = 'DTA Error' + OR di."disposition" = 'DTA Error - Exam/MO' + OR di."disposition" = 'DTA Error - Fed Recs' + OR di."disposition" = 'DTA Error - Other Recs' + OR di."disposition" = 'DTA Error - PMRs') + AND (sc."decision_review_remanded_id" IS NULL + OR epe."source_id" IS NULL)), + no_040_ep as (SELECT * + FROM oar_list + intersect + SELECT * + FROM no_ep_list), + no_040_sync as (SELECT distinct reference_id, + COUNT(no_040_ep.reference_id) FILTER (WHERE report_load = '#{rep_load}') OVER (PARTITION BY no_040_ep.reference_id) as decision_issue_count, + COUNT(no_040_ep.reference_id) FILTER (WHERE report_load = '#{rep_load}' AND (decision_sync_processed_at IS NOT NULL OR closed_at IS NOT NULL)) OVER (PARTITION BY no_040_ep.reference_id) as synced_count + FROM no_040_ep + LEFT JOIN "public"."request_issues" ri + ON (no_040_ep."source_id" = ri."decision_review_id" + AND no_040_ep."source_type" = ri."decision_review_type")), + histogram_raw_data as (select no_040_ep.*, decision_issue_count, synced_count, + extc."CLAIM_ID" as vbms_claim_id, + extc."LIFECYCLE_STATUS_CHANGE_DATE" as vbms_closed_at, + DATE_PART('day', CURRENT_DATE - extc."LIFECYCLE_STATUS_CHANGE_DATE") as age_days + FROM no_040_ep + INNER JOIN no_040_sync ON no_040_ep.reference_id = no_040_sync.reference_id + left join 
vbms_ext_claim extc + on extc."CLAIM_ID" = no_040_ep.reference_id::numeric) + SELECT reference_id + FROM histogram_raw_data + WHERE decision_issue_count = synced_count SQL conn.execute(raw_sql) end # Method to remand supplemental claims - def call_remand(ep_ref, conn) + def call_remand(ep_ref, _conn) begin epe = EndProductEstablishment.find_by(reference_id: ep_ref) epe.source.create_remand_supplemental_claims! - rescue StandardError => error @logs.push("RemandDtaOrDooHigherLevelReview::Error -- Reference id #{ep_ref}"\ "Time: #{Time.zone.now}"\ @@ -111,26 +114,8 @@ def call_remand(ep_ref, conn) # Save Logs to S3 Bucket def store_logs_in_s3_bucket(report_load, env) # Set Client Resources for AWS - Aws.config.update(region: "us-gov-west-1") - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("appeals-dbas") - - # Path to folder and file name file_name = "ep_establishment_workaround/#{env}/remand_hlr_logs/remand_dta_or_doo_hlr_report_load_#{report_load}-#{Time.zone.now}" - - # Store contents of logs array in a temporary file - content = @logs.join("\n") - temporary_file = Tempfile.new("remand_hlr_log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - # Store File in S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") - - # Delete Temporary File - temporary_file.close! 
+ S3Service.store_file("#{@folder_name}/#{file_name}", @logs) end end end diff --git a/spec/jobs/bgs_share_error_fix_job_spec.rb b/spec/jobs/bgs_share_error_fix_job_spec.rb new file mode 100644 index 00000000000..b0c27e0dab5 --- /dev/null +++ b/spec/jobs/bgs_share_error_fix_job_spec.rb @@ -0,0 +1,119 @@ +# frozen_string_literal: true + +describe BgsShareErrorFixJob, :postgres do + let(:share_error) { "BGS::ShareError" } + let(:file_number) { "123456789" } + let!(:veteran) { create(:veteran, file_number: file_number) } + let!(:hlr) do + create(:higher_level_review, + establishment_error: share_error, + veteran_file_number: file_number) + end + let!(:epe) do + create(:end_product_establishment, + source: hlr, + established_at: Time.zone.now, + veteran_file_number: file_number) + end + + subject { described_class.new } + + context "BGS::ShareError" do + context "HLR" do + context "when the error exists on HigherLevelReview" + describe "when EPE has established_at date" do + it "clears the BGS::ShareError on the HLR" do + subject.perform + expect(hlr.reload.establishment_error).to be_nil + end + end + describe "when EPE does not have established_at date" do + it "does not clear the BGS::ShareError on the HLR" do + epe.update(established_at: nil) + subject.perform + expect(hlr.reload.establishment_error).to eq(share_error) + end + end + context "when the hlr does not have the BGS::ShareError" do + it "does not attempt to clear the error" do + hlr.update(establishment_error: nil) + subject.perform + expect(hlr.reload.establishment_error).to eq(nil) + end + end + end + + context "RIU" do + let!(:hlr_2) { create(:higher_level_review) } + + let!(:riu) do + create(:request_issues_update, + error: share_error, + review_id: 65, + review_type: hlr_2) + end + let!(:epe_2) do + create(:end_product_establishment, + id: riu.review_id, + established_at: Time.zone.now, + veteran_file_number: 3_231_213_123) + end + + context "when the error exists on RIU" + describe "when EPE has 
established_at date" do + it "clears the BGS::ShareError on the RIU" do + subject.perform + expect(riu.reload.error).to be_nil + end + end + describe "when EPE does not have established_at date" do + it "does not clear the BGS::ShareError on the RIU" do + epe_2.update(established_at: nil) + subject.perform + expect(riu.reload.error).to eq(share_error) + end + end + context "when the RIU does not have the BGS::ShareError" do + it "does not attempt to clear the error" do + riu.update(error: nil) + subject.perform + expect(riu.reload.error).to eq(nil) + end + end + end + + context "BGE" do + let!(:epe_3) do + create(:end_product_establishment, + established_at: Time.zone.now, veteran_file_number: 88_888_888) + end + let!(:bge) do + create(:board_grant_effectuation, + end_product_establishment_id: epe_3.id, + decision_sync_error: share_error) + end + + context "when the error exists on BGE" + describe "when EPE has established_at date" do + it "clear_error!" do + subject.perform + expect(bge.reload.decision_sync_error).to be_nil + end + end + describe "if EPE does not have established_at" do + it "clears the BGS::ShareError on the BGE" do + epe_3.update(established_at: nil) + subject.perform + expect(bge.reload.decision_sync_error).to eq(share_error) + end + end + context "when the BGE does not have the BGS::ShareError" do + it "does not attempt to clear the error" do + bge.update(decision_sync_error: nil) + subject.perform + expect(bge.reload.decision_sync_error).to eq(nil) + end + end + end + end +end diff --git a/spec/jobs/claim_date_dt_fix_job_spec.rb b/spec/jobs/claim_date_dt_fix_job_spec.rb new file mode 100644 index 00000000000..14cfbb889f4 --- /dev/null +++ b/spec/jobs/claim_date_dt_fix_job_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +describe ClaimDateDtFixJob, :postres do + let(:claim_date_dt_error) { "ClaimDateDt" } + + let!(:decision_doc_with_error) do + create( + :decision_document, + error: claim_date_dt_error, + processed_at: 7.days.ago, + 
uploaded_to_vbms_at: 7.days.ago + ) + end + + subject { described_class.new } + + before do + create_list(:decision_document, 5) + create_list(:decision_document, 2, error: claim_date_dt_error, processed_at: 7.days.ago, + uploaded_to_vbms_at: 7.days.ago) + end + + context "when error, processed_at and uploaded_to_vbms_at are populated" do + it "clears the error field" do + expect(subject.decision_docs_with_errors.count).to eq(3) + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + expect(subject.decision_docs_with_errors.count).to eq(0) + end + end + + context "when either uploaded_to_vbms_at or processed_at are nil" do + describe "when uploaded_to_vbms_at is nil" do + it "does not clear the error field" do + decision_doc_with_error.update(uploaded_to_vbms_at: nil) + + expect(decision_doc_with_error.error).to eq("ClaimDateDt") + + subject.perform + + expect(decision_doc_with_error.reload.error).not_to be_nil + end + end + + describe "when processed_at is nil" do + it "does not clear the error field" do + decision_doc_with_error.update(processed_at: nil) + expect(decision_doc_with_error.error).to eq("ClaimDateDt") + + subject.perform + + expect(decision_doc_with_error.reload.error).not_to be_nil + end + end + end +end diff --git a/spec/jobs/claim_not_established_fix_job_spec.rb b/spec/jobs/claim_not_established_fix_job_spec.rb new file mode 100644 index 00000000000..12f44b3d12b --- /dev/null +++ b/spec/jobs/claim_not_established_fix_job_spec.rb @@ -0,0 +1,125 @@ +# frozen_string_literal: true + +describe ClaimNotEstablishedFixJob, :postgres do + let(:claim_not_established_error) { "Claim not established." 
} + let!(:veteran_file_number) { "111223333" } + let!(:veteran) { create(:veteran, file_number: veteran_file_number) } + let(:appeal) { create(:appeal, veteran_file_number: veteran_file_number) } + + let!(:decision_doc_with_error) do + create( + :decision_document, + error: claim_not_established_error, + processed_at: 7.days.ago, + uploaded_to_vbms_at: 7.days.ago, + appeal: appeal + ) + end + + let!(:epe) do + create( + :end_product_establishment, + code: "030BGRNR", + source: decision_doc_with_error, + veteran_file_number: veteran_file_number, + established_at: Time.zone.now + ) + end + + subject { described_class.new } + + context "#claim_not_established" do + context "when code and established_at are present on epe" do + it "clears the error field when epe code is 030" do + epe.update(code: "030") + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + end + + it "clears the error field when epe code is 040" do + epe.update(code: "040") + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + end + + it "clears the error field when epe code is 930" do + epe.update(code: "930") + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + end + + it "clears the error field when epe code is 682" do + epe.update(code: "682") + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + end + end + + context "When either code or established_at are missing on epe" do + describe "when code and established_at are nil" do + it "does not clear error on decision_document" do + epe.update(code: nil) + epe.update(established_at: nil) + subject.perform + + expect(decision_doc_with_error.reload.error).to eq(claim_not_established_error) + end + end + + describe "when code is nil" do + it "does not clear error on decision_document" do + epe.update(code: nil) + subject.perform + + expect(decision_doc_with_error.reload.error).to eq(claim_not_established_error) + end + end + + describe "when 
established_at is nil" do + it "does not clear error on decision_document" do + epe.update(established_at: nil) + subject.perform + + expect(decision_doc_with_error.reload.error).to eq(claim_not_established_error) + end + end + end + + context "When a decision document has multiple end product establishments" do + before do + create( + :end_product_establishment, + code: "930AMADOR", + source: decision_doc_with_error, + veteran_file_number: veteran_file_number, + established_at: Time.zone.now + ) + create( + :end_product_establishment, + code: "040SCR", + source: decision_doc_with_error, + veteran_file_number: veteran_file_number, + established_at: Time.zone.now + ) + end + describe "when all epes are validated as true" do + it "clears the error on the decision document" do + subject.perform + + expect(decision_doc_with_error.reload.error).to be_nil + end + end + + it "does not clear the error" do + epe.update(established_at: nil) + subject.perform + + expect(decision_doc_with_error.reload.error).to eq(claim_not_established_error) + end + end + end +end diff --git a/spec/jobs/dta_sc_creation_failed_fix_job_spec.rb b/spec/jobs/dta_sc_creation_failed_fix_job_spec.rb new file mode 100644 index 00000000000..a68ce65f72b --- /dev/null +++ b/spec/jobs/dta_sc_creation_failed_fix_job_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +describe DtaScCreationFailedFixJob, :postgres do + let(:dta_error) { "DTA SC creation failed" } + let!(:veteran_file_number) { "111223333" } + let!(:veteran) { create(:veteran, file_number: veteran_file_number) } + + let!(:hlr) { create(:higher_level_review, veteran_file_number: veteran_file_number, establishment_error: dta_error) } + let!(:sc) { create(:supplemental_claim, veteran_file_number: veteran_file_number, decision_review_remanded: hlr) } + + context "#dta_sc_creation_failed_fix" do + subject { described_class.new("higher_level_review", dta_error) } + + context "When SC has decision_review_remanded_id and 
decision_review_remanded_type" do + it "clears the error field on related HLR" do + subject.perform + expect(hlr.reload.establishment_error).to be_nil + end + end + + context "When either decision_review_remanded_id or decision_review_remanded_type values are nil" do + describe "when decision_review_remanded_id is nil" do + it "does not clear error field on related HLR" do + sc.update(decision_review_remanded_id: nil) + subject.perform + expect(hlr.reload.establishment_error).to eql(dta_error) + end + end + + describe "when decision_review_remanded_type is nil" do + it "does not clear error field on related HLR" do + sc.update(decision_review_remanded_type: nil) + subject.perform + expect(hlr.reload.establishment_error).to eql(dta_error) + end + end + end + end +end diff --git a/spec/jobs/sc_dta_for_appeal_fix_job_spec.rb b/spec/jobs/sc_dta_for_appeal_fix_job_spec.rb new file mode 100644 index 00000000000..cdf8980881f --- /dev/null +++ b/spec/jobs/sc_dta_for_appeal_fix_job_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +describe ScDtaForAppealFixJob, :postgres do + let(:sc_dta_for_appeal_error) { "Can't create a SC DTA for appeal" } + let!(:veteran_file_number) { "111223333" } + let!(:veteran_file_number_2) { "999999999" } + + # let!(:veteran) { create(:veteran, file_number: veteran_file_number) } + let(:appeal) { create(:appeal, veteran_file_number: veteran_file_number) } + let(:appeal_2) { create(:appeal, veteran_file_number: veteran_file_number_2) } + let!(:decision_doc_with_error) do + create( + :decision_document, + error: sc_dta_for_appeal_error, + appeal: appeal + ) + end + + let!(:decision_doc_with_error_2) do + create( + :decision_document, + error: sc_dta_for_appeal_error, + appeal: appeal_2 + ) + end + + before do + create_list(:decision_document, 5) + end + + subject { described_class.new } + + context "#sc_dta_for_appeal_fix" do + context "when payee_code is nil" do + before do + decision_doc_with_error.appeal.claimant.update(payee_code: 
nil) + end + # we need to manipulate the claimant.type for these describes + describe "claimant.type is VeteranClaimant" do + it "updates payee_code to 00" do + decision_doc_with_error_2.appeal.claimant.update(payee_code: nil) + + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.appeal.claimant.payee_code).to eq("00") + expect(decision_doc_with_error_2.appeal.claimant.payee_code).to eq("00") + end + + it "clears error column" do + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.reload.error).to be_nil + end + end + + describe "claimant.type is DependentClaimant" do + it "updates payee_code to 10" do + decision_doc_with_error.appeal.claimant.update(type: "DependentClaimant") + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.appeal.claimant.payee_code).to eq("10") + end + + it "clears error column" do + decision_doc_with_error.appeal.claimant.update(type: "DependentClaimant") + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.reload.error).to be_nil + end + end + end + + context "when payee_code is populated" do + it "does not update payee_code" do + expect(decision_doc_with_error.appeal.claimant.payee_code).to eq("00") + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.appeal.claimant.payee_code).to eq("00") + end + it "does not clear error field" do + subject.sc_dta_for_appeal_fix + expect(decision_doc_with_error.error).to eq(sc_dta_for_appeal_error) + end + end + end +end diff --git a/spec/services/stuck_job_report_service_spec.rb b/spec/services/stuck_job_report_service_spec.rb new file mode 100644 index 00000000000..ce5310cf845 --- /dev/null +++ b/spec/services/stuck_job_report_service_spec.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +describe StuckJobReportService, :postgres do + ERROR_TEXT = "Descriptive Error Name" + FAILED_TRANSACTION_ERROR = "great error" + STUCK_JOB_NAME = "VBMS::UnknownUser" + BUCKET_NAME = "data-remediation-output" + CREATE_FILE_NAME = 
"descriptive-error-name" + FILEPATH = "/var/folders/fc/8gwfm4251qlb2nzgn3g4kldm0000gp/T/cdc-log.txt20230831-49789-qkyx0t" + + before do + Timecop.freeze + end + + fake_data = [ + { + class_name: "Decision Document", + id: 1 + }, + { + class_name: "Decision Document", + id: 2 + }, + { + class_name: "Decision Document", + id: 3 + }, + { + class_name: "Decision Document", + id: 4 + } + ] + + subject { described_class.new } + + context "StuckJobReportService" do + it "writes the job report" do + subject.append_record_count(4, STUCK_JOB_NAME) + + fake_data.map do |data| + subject.append_single_record(data[:class_name], data[:id]) + end + + subject.append_record_count(0, STUCK_JOB_NAME) + subject.write_log_report(ERROR_TEXT) + + expect(subject.logs[0]).to include("#{Time.zone.now} ********** Remediation Log Report **********") + expect(subject.logs[1]).to include("#{STUCK_JOB_NAME}::Log - Total number of Records with Errors: 4") + expect(subject.logs[5]).to include("Record Type: Decision Document - Record ID: 4.") + expect(subject.logs[6]).to include("#{STUCK_JOB_NAME}::Log - Total number of Records with Errors: 0") + end + + it "writes error log report" do + subject.append_record_count(4, STUCK_JOB_NAME) + + fake_data.map do |data| + subject.append_error(data[:class_name], data[:id], FAILED_TRANSACTION_ERROR) + end + + subject.append_record_count(4, STUCK_JOB_NAME) + subject.write_log_report(ERROR_TEXT) + + expect(subject.logs[0]).to include("#{Time.zone.now} ********** Remediation Log Report **********") + expect(subject.logs[1]).to include("#{STUCK_JOB_NAME}::Log - Total number of Records with Errors: 4") + expect(subject.logs[5]).to include("Record Type: Decision Document - Record ID: 4. 
Encountered great error,"\ + " record not updated.") + expect(subject.logs[6]).to include("#{STUCK_JOB_NAME}::Log - Total number of Records with Errors: 4") + end + + describe "names the S3 bucket correctly" + it "names uat bucket" do + allow(Rails).to receive(:deploy_env).and_return(:uat) + + subject.upload_logs(CREATE_FILE_NAME) + expect(subject.folder_name).to eq("data-remediation-output-uat") + end + + it "names prod bucket" do + allow(Rails).to receive(:deploy_env).and_return(:prod) + + subject.upload_logs(CREATE_FILE_NAME) + expect(subject.folder_name).to eq("data-remediation-output") + end + end +end