diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index f2dec46dfae..d89fe2a19d5 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -45,6 +45,15 @@ jobs: ports: - 1521:1521 + localstack: + image: localstack/localstack:0.14.5 + ports: + - 4566:4566 + env: + SERVICES: "sqs" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + strategy: fail-fast: false matrix: @@ -188,6 +197,9 @@ jobs: - name: "Wait for database" run: dockerize -wait tcp://postgres:5432 -timeout 1m + - name: "Wait for localstack" + run: dockerize -wait tcp://localstack:4566 -timeout 30s + - name: "Wait for FACOLS" run: ./ci-bin/capture-log "bundle exec rake local:vacols:wait_for_connection" diff --git a/Dockerfile b/Dockerfile index 16d641ac7ce..a774e7e8943 100644 --- a/Dockerfile +++ b/Dockerfile @@ -66,10 +66,6 @@ RUN apt install -y ${CASEFLOW} && \ # install jemalloc RUN apt install -y --no-install-recommends libjemalloc-dev - -# install datadog agent -RUN DD_INSTALL_ONLY=true DD_AGENT_MAJOR_VERSION=7 DD_API_KEY=$(cat config/datadog.key) bash -c "$(curl -L https://raw.githubusercontent.com/DataDog/datadog-agent/master/cmd/agent/install_script.sh)" - RUN rm -rf /var/lib/apt/lists/* # Installing the version of bundler that corresponds to the Gemfile.lock diff --git a/Gemfile b/Gemfile index 92e60188bc8..a6e0728b60e 100644 --- a/Gemfile +++ b/Gemfile @@ -22,8 +22,6 @@ gem "connect_mpi", git: "https://github.com/department-of-veterans-affairs/conne gem "connect_vbms", git: "https://github.com/department-of-veterans-affairs/connect_vbms.git", ref: "9807d9c9f0f3e3494a60b6693dc4f455c1e3e922" gem "console_tree_renderer", git: "https://github.com/department-of-veterans-affairs/console-tree-renderer.git", tag: "v0.1.1" gem "countries" -gem "ddtrace" -gem "dogstatsd-ruby" gem "dry-schema", "~> 1.4" gem "fast_jsonapi" gem "fuzzy_match" @@ -33,8 +31,28 @@ gem "icalendar" gem "kaminari" gem "logstasher" gem "moment_timezone-rails" -gem "newrelic_rpm" gem "nokogiri", ">= 1.11.0.rc4" + +gem "opentelemetry-exporter-otlp", require: false +gem "opentelemetry-sdk", require: false + +gem "opentelemetry-instrumentation-action_pack", require: false +gem "opentelemetry-instrumentation-action_view", require: false +gem "opentelemetry-instrumentation-active_job", require: false +gem "opentelemetry-instrumentation-active_model_serializers", require: false +gem "opentelemetry-instrumentation-active_record", require: false +gem "opentelemetry-instrumentation-aws_sdk", require: false +gem "opentelemetry-instrumentation-concurrent_ruby", require: false +gem "opentelemetry-instrumentation-faraday", require: false +gem "opentelemetry-instrumentation-http", require: false +gem "opentelemetry-instrumentation-http_client", require: false +gem "opentelemetry-instrumentation-net_http", require: false +gem "opentelemetry-instrumentation-pg", require: false +gem "opentelemetry-instrumentation-rack", require: false +gem "opentelemetry-instrumentation-rails", require: false +gem "opentelemetry-instrumentation-rake", require: false +gem "opentelemetry-instrumentation-redis", require: false + gem "paper_trail", "~> 12.0" # Used to speed up reporting gem "parallel" @@ -59,7 +77,7 @@ gem "rainbow" # React gem "react_on_rails", "11.3.0" gem "redis-mutex" -gem "redis-namespace" +gem "redis-namespace", "~> 1.11.0" gem "redis-rails", "~> 5.0.2" gem "request_store" gem "roo", "~> 2.7" @@ -119,7 +137,8 @@ group :test, :development, :demo, :make_docs do gem "rubocop-performance" gem "rubocop-rails" gem 
"scss_lint", require: false - gem "simplecov", git: "https://github.com/colszowka/simplecov.git", require: false + gem "simplecov", require: false + gem "simplecov-lcov", require: false gem "single_cov" gem "sniffybara", git: "https://github.com/department-of-veterans-affairs/sniffybara.git" gem "sql_tracker" diff --git a/Gemfile.lock b/Gemfile.lock index f2db3b52b7f..2632deaf2e2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,12 +1,3 @@ -GIT - remote: https://github.com/colszowka/simplecov.git - revision: 783c9d7e9995f3ea9baf9fbb517c1d0ceb12acdb - specs: - simplecov (0.15.1) - docile (~> 1.1.0) - json (>= 1.8, < 3) - simplecov-html (~> 0.10.0) - GIT remote: https://github.com/department-of-veterans-affairs/caseflow-commons revision: 9bd3635fbd8094d25160669f38d8699e2f1d7a98 @@ -1458,7 +1449,7 @@ GEM backport (1.2.0) base64 (0.2.0) benchmark (0.3.0) - benchmark-ips (2.7.2) + benchmark-ips (2.14.0) bootsnap (1.7.5) msgpack (~> 1.0) brakeman (4.7.1) @@ -1529,8 +1520,6 @@ GEM database_cleaner-core (~> 2.0.0) database_cleaner-core (2.0.1) date (3.3.3) - ddtrace (0.34.1) - msgpack dead_end (4.0.0) debase (0.2.4.1) debase-ruby_core_source (>= 0.10.2) @@ -1548,8 +1537,7 @@ GEM ruby-statistics (>= 2.1) thor (>= 0.19, < 2) diff-lcs (1.3) - docile (1.1.5) - dogstatsd-ruby (4.4.0) + docile (1.4.1) dotenv (2.8.1) dotenv-rails (2.8.1) dotenv (= 2.8.1) @@ -1614,13 +1602,16 @@ GEM foreman (0.87.2) formatador (0.2.5) fuzzy_match (2.1.0) - get_process_mem (0.2.4) + get_process_mem (0.2.7) ffi (~> 1.0) git (1.13.2) addressable (~> 2.8) rchardet (~> 1.8) globalid (1.2.1) activesupport (>= 6.1) + google-protobuf (3.25.4) + googleapis-common-protos-types (1.15.0) + google-protobuf (>= 3.18, < 5.a) govdelivery-tms (2.8.4) activesupport faraday @@ -1644,7 +1635,8 @@ GEM builder (>= 2.1.2) hana (1.3.6) hashdiff (1.0.0) - heapy (0.1.4) + heapy (0.2.0) + thor holidays (6.6.1) httpclient (2.8.3) httpi (2.4.4) @@ -1664,7 +1656,6 @@ GEM rails-dom-testing (>= 1, < 3) railties (>= 4.2.0) thor (>= 0.14, < 2.0) - json (2.3.0) json-schema (4.3.0) addressable (>= 2.8) json_schemer (0.2.16) @@ -1709,7 +1700,7 @@ GEM net-pop net-smtp marcel (1.0.4) - memory_profiler (0.9.14) + memory_profiler (1.0.2) meta_request (0.7.2) rack-contrib (>= 1.1, < 3) railties (>= 3.0.0, < 7) @@ -1719,7 +1710,7 @@ GEM mime-types-data (3.2019.1009) mini_histogram (0.3.1) mini_mime (1.1.2) - mini_portile2 (2.8.5) + mini_portile2 (2.8.7) minitest (5.19.0) moment_timezone-rails (0.5.0) momentjs-rails (2.29.4.1) @@ -1737,7 +1728,6 @@ GEM timeout net-smtp (0.3.3) net-protocol - newrelic_rpm (6.5.0.357) nio4r (2.5.9) no_proxy_fix (0.1.2) nokogiri (1.15.5) @@ -1751,6 +1741,92 @@ GEM faraday (>= 0.9) sawyer (~> 0.8.0, >= 0.5.3) open4 (1.3.4) + opentelemetry-api (1.1.0) + opentelemetry-common (0.19.7) + opentelemetry-api (~> 1.0) + opentelemetry-exporter-otlp (0.24.2) + google-protobuf (~> 3.19) + googleapis-common-protos-types (~> 1.3) + opentelemetry-api (~> 1.1) + opentelemetry-common (~> 0.19.6) + opentelemetry-sdk (~> 1.2) + opentelemetry-semantic_conventions + opentelemetry-instrumentation-action_pack (0.5.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-rack (~> 0.21) + opentelemetry-instrumentation-action_view (0.4.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-active_support (~> 0.1) + opentelemetry-instrumentation-base (~> 0.20) + opentelemetry-instrumentation-active_job (0.4.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + 
opentelemetry-instrumentation-active_model_serializers (0.19.1) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-active_record (0.5.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + ruby2_keywords + opentelemetry-instrumentation-active_support (0.3.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-aws_sdk (0.3.2) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-base (0.21.1) + opentelemetry-api (~> 1.0) + opentelemetry-registry (~> 0.1) + opentelemetry-instrumentation-concurrent_ruby (0.20.1) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-faraday (0.22.0) + opentelemetry-api (~> 1.0) + opentelemetry-common (~> 0.19.3) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-http (0.21.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-http_client (0.21.0) + opentelemetry-api (~> 1.0) + opentelemetry-common (~> 0.19.3) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-net_http (0.21.1) + opentelemetry-api (~> 1.0) + opentelemetry-common (~> 0.19.3) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-pg (0.23.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-rack (0.22.1) + opentelemetry-api (~> 1.0) + opentelemetry-common (~> 0.19.3) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-rails (0.25.0) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-action_pack (~> 0.5.0) + opentelemetry-instrumentation-action_view (~> 0.4.0) + opentelemetry-instrumentation-active_job (~> 0.4.0) + opentelemetry-instrumentation-active_record (~> 0.5.0) + opentelemetry-instrumentation-active_support (~> 0.3.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-rake (0.1.1) + opentelemetry-api (~> 1.0) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-instrumentation-redis (0.24.1) + opentelemetry-api (~> 1.0) + opentelemetry-common (~> 0.19.3) + opentelemetry-instrumentation-base (~> 0.21.0) + opentelemetry-registry (0.2.0) + opentelemetry-api (~> 1.1) + opentelemetry-sdk (1.2.1) + opentelemetry-api (~> 1.1) + opentelemetry-common (~> 0.19.3) + opentelemetry-registry (~> 0.2) + opentelemetry-semantic_conventions + opentelemetry-semantic_conventions (1.10.0) + opentelemetry-api (~> 1.0) paper_trail (12.3.0) activerecord (>= 5.2) request_store (~> 1.1) @@ -1840,8 +1916,8 @@ GEM redis-namespace (~> 1.0) redis-mutex (4.0.2) redis-classy (~> 2.0) - redis-namespace (1.6.0) - redis (>= 3.0.4) + redis-namespace (1.11.0) + redis (>= 4) redis-rack (2.0.4) rack (>= 1.5, < 3) redis-store (>= 1.2, < 2) @@ -1919,6 +1995,7 @@ GEM ruby-prof (1.4.1) ruby-progressbar (1.13.0) ruby-statistics (3.0.2) + ruby2_keywords (0.0.5) ruby_dep (1.5.0) ruby_parser (3.20.3) sexp_processor (~> 4.16) @@ -1963,7 +2040,13 @@ GEM thor shoulda-matchers (5.3.0) activesupport (>= 5.2.0) - simplecov-html (0.10.2) + simplecov (0.22.0) + docile (~> 1.1) + simplecov-html (~> 0.11) + simplecov_json_formatter (~> 0.1) + simplecov-html (0.13.1) + simplecov-lcov (0.8.0) + simplecov_json_formatter (0.1.4) single_cov (1.3.2) sixarm_ruby_unaccent (1.2.0) socksify (1.7.1) @@ -2072,10 +2155,8 @@ 
DEPENDENCIES countries danger (~> 6.2.2) database_cleaner-active_record (= 2.0.0) - ddtrace debase derailed_benchmarks - dogstatsd-ruby dotenv-rails dry-schema (~> 1.4) factory_bot_rails (~> 5.2) @@ -2095,8 +2176,25 @@ DEPENDENCIES logstasher meta_request moment_timezone-rails - newrelic_rpm nokogiri (>= 1.11.0.rc4) + opentelemetry-exporter-otlp + opentelemetry-instrumentation-action_pack + opentelemetry-instrumentation-action_view + opentelemetry-instrumentation-active_job + opentelemetry-instrumentation-active_model_serializers + opentelemetry-instrumentation-active_record + opentelemetry-instrumentation-aws_sdk + opentelemetry-instrumentation-concurrent_ruby + opentelemetry-instrumentation-faraday + opentelemetry-instrumentation-http + opentelemetry-instrumentation-http_client + opentelemetry-instrumentation-net_http + opentelemetry-instrumentation-pg + opentelemetry-instrumentation-rack + opentelemetry-instrumentation-rails + opentelemetry-instrumentation-rake + opentelemetry-instrumentation-redis + opentelemetry-sdk paper_trail (~> 12.0) parallel paranoia (~> 2.2) @@ -2115,7 +2213,7 @@ DEPENDENCIES rb-readline react_on_rails (= 11.3.0) redis-mutex - redis-namespace + redis-namespace (~> 1.11.0) redis-rails (~> 5.0.2) request_store roo (~> 2.7) @@ -2139,7 +2237,8 @@ DEPENDENCIES sentry-raven shoryuken (= 3.1.11) shoulda-matchers - simplecov! + simplecov + simplecov-lcov single_cov sniffybara! solargraph diff --git a/Makefile.example b/Makefile.example index f37b02940ed..1305da089ec 100644 --- a/Makefile.example +++ b/Makefile.example @@ -289,7 +289,7 @@ one-test-headless: # run the rspec test headless. CI=1 bundle exec rspec $(RUN_ARGS) --format progress run-all-queues: ## start shoryuken with all queues - bundle exec shoryuken -q caseflow_development_send_notifications caseflow_development_high_priority caseflow_development_low_priority -R + bundle exec shoryuken -q caseflow_development_send_notifications.fifo caseflow_development_high_priority caseflow_development_low_priority -R run-low-priority: ## start shoryuken with just the low priority queue bundle exec shoryuken -q caseflow_development_low_priority -R @@ -298,7 +298,7 @@ run-high-priority: ## start shoryuken with just the high priority queue bundle exec shoryuken -q caseflow_development_high_priority -R run-send-notifications: ## start shoryuken with just the send_notification queue - bundle exec shoryuken -q caseflow_development_send_notifications -R + bundle exec shoryuken -q caseflow_development_send_notifications.fifo -R jest: ## Run jest tests cd client && yarn jest diff --git a/README.md b/README.md index a9f82e9f92d..281703aa1e2 100644 --- a/README.md +++ b/README.md @@ -137,13 +137,6 @@ See debugging steps as well as more information about FACOLS in our [wiki](https Review the [FACOLS documentation](docs/FACOLS.md) for details. ## Monitoring ####################################################### -We use NewRelic to monitor the app. By default, it's disabled locally. To enable it, do: - -``` -NEW_RELIC_LICENSE_KEY='' NEW_RELIC_AGENT_ENABLED=true bundle exec foreman start -``` - -You may wish to do this if you are debugging our NewRelic integration, for instance. 
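Since every opentelemetry-* gem added to the Gemfile above is declared with require: false, the SDK and the individual instrumentations still have to be required and configured explicitly at boot, typically from an initializer. The sketch below shows roughly what that wiring could look like; the file path, service name, and the specific instrumentations enabled are assumptions based on opentelemetry-ruby conventions, not something this diff shows.

# config/initializers/opentelemetry.rb -- hypothetical path, not part of this diff
require "opentelemetry/sdk"
require "opentelemetry/exporter/otlp"
require "opentelemetry/instrumentation/rails"
require "opentelemetry/instrumentation/rack"

OpenTelemetry::SDK.configure do |c|
  c.service_name = "caseflow" # assumed service name
  # Enable the instrumentations required above; each registers itself when required.
  c.use "OpenTelemetry::Instrumentation::Rails"
  c.use "OpenTelemetry::Instrumentation::Rack"
  # With opentelemetry-exporter-otlp required, the SDK's default batch span processor
  # exports over OTLP and reads OTEL_EXPORTER_OTLP_ENDPOINT from the environment.
end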
--- diff --git a/app/controllers/api/application_controller.rb b/app/controllers/api/application_controller.rb index adb315f0d93..33b11c6b809 100644 --- a/app/controllers/api/application_controller.rb +++ b/app/controllers/api/application_controller.rb @@ -3,8 +3,6 @@ class Api::ApplicationController < ActionController::Base protect_from_forgery with: :null_session - include TrackRequestId - before_action :strict_transport_security before_action :setup_fakes, diff --git a/app/controllers/api/v1/va_notify_controller.rb b/app/controllers/api/v1/va_notify_controller.rb index d47a946fd06..9ddb119e58e 100644 --- a/app/controllers/api/v1/va_notify_controller.rb +++ b/app/controllers/api/v1/va_notify_controller.rb @@ -1,46 +1,63 @@ # frozen_string_literal: true class Api::V1::VaNotifyController < Api::ApplicationController - # Purpose: POST request to VA Notify API to update status for a Notification entry + # Purpose: POST request to VA Notify API to update status for a Notification entry. # # Params: Params content can be found at https://vajira.max.gov/browse/APPEALS-21021 # # Response: Update corresponding Notification status def notifications_update - send "#{required_params[:notification_type]}_update" + send_sqs_message + render json: { + message: "#{params['notification_type']} Notification successfully updated: ID #{params['id']}" + } + rescue StandardError => error + log_error(error, params["id"], params["notification_type"]) + render json: { error: error.message }, status: :bad_request end private - # Purpose: Finds and updates notification if type is email - # - # Params: Params content can be found at https://vajira.max.gov/browse/APPEALS-21021 - # - # Response: Update corresponding email Notification status - def email_update - redis.set("email_update:#{required_params[:id]}:#{required_params[:status]}", 0) - - render json: { message: "Email notification successfully updated: ID #{required_params[:id]}" } + def va_notify_params + params.permit(:id, :notification_type, :status, :status_reason, :to) end - # Purpose: Finds and updates notification if type is SMS - # - # Params: Params content can be found at https://vajira.max.gov/browse/APPEALS-21021 - # - # Response: Update corresponding SMS Notification status - def sms_update - redis.set("sms_update:#{required_params[:id]}:#{required_params[:status]}", 0) - - render json: { message: "SMS notification successfully updated: ID #{required_params[:id]}" } + def build_message_body + id_param, notification_type_param, status_param = va_notify_params.require([:id, :notification_type, :status]) + + { + external_id: id_param, + notification_type: notification_type_param, + recipient: va_notify_params[:to], + status: status_param, + status_reason: va_notify_params[:status_reason] + } + rescue StandardError => error + raise error end - def required_params - id_param, notification_type_param, status_param = params.require([:id, :notification_type, :status]) + def build_sqs_message + message_body = build_message_body.to_json + + { + queue_url: SqsService.find_queue_url_by_name(name: "receive_notifications"), + message_body: message_body, + message_deduplication_id: Digest::SHA256.hexdigest(message_body), + message_group_id: Constants.VA_NOTIFY_CONSTANTS.message_group_id + } + rescue StandardError => error + raise error + end - { id: id_param, notification_type: notification_type_param, status: status_param } + def send_sqs_message + sqs = SqsService.sqs_client + sqs.send_message(build_sqs_message) end - def redis - @redis ||= Redis.new(url: 
Rails.application.secrets.redis_url_cache) + def log_error(error, external_id, notification_type) + Rails.logger.error("#{error.message}\n#{error.backtrace.join("\n")}\n \ + external_id: #{external_id}\n \ + notification_type: #{notification_type}") + Raven.capture_exception(error) end end diff --git a/app/controllers/application_base_controller.rb b/app/controllers/application_base_controller.rb index f9b1304a804..7062f96811d 100644 --- a/app/controllers/application_base_controller.rb +++ b/app/controllers/application_base_controller.rb @@ -6,8 +6,6 @@ class ApplicationBaseController < ActionController::Base # For APIs, you may want to use :null_session instead. protect_from_forgery with: :exception - include TrackRequestId - before_action :check_out_of_service before_action :strict_transport_security diff --git a/app/controllers/case_distribution_levers_tests_controller.rb b/app/controllers/case_distribution_levers_tests_controller.rb index 11ccd90e0c8..e7eed173af9 100644 --- a/app/controllers/case_distribution_levers_tests_controller.rb +++ b/app/controllers/case_distribution_levers_tests_controller.rb @@ -8,6 +8,7 @@ class CaseDistributionLeversTestsController < ApplicationController def acd_lever_index_test @acd_levers = CaseDistributionLever.all @acd_history = CaseDistributionAuditLeverEntry.lever_history + @returned_appeal_jobs = ReturnedAppealJob.all.order(created_at: :desc).limit(15) render "case_distribution_levers/test" end @@ -40,6 +41,13 @@ def run_demo_docket_priority head :ok end + def run_demo_non_avlj_appeals + Rake::Task["db:seed:non_ssc_avlj_legacy_appeals"].reenable + Rake::Task["db:seed:non_ssc_avlj_legacy_appeals"].invoke + + head :ok + end + def appeals_ready_to_distribute csv_data = AppealsReadyForDistribution.process @@ -66,6 +74,17 @@ def appeals_non_priority_ready_to_distribute send_data csv_data, filename: filename end + def run_return_legacy_appeals_to_board + result = ReturnLegacyAppealsToBoardJob.perform_now + + unless result + render json: { error: "Job failed" }, status: :unprocessable_entity + return + end + + head :ok + end + def appeals_distributed # change this to the correct class csv_data = AppealsDistributed.process @@ -80,6 +99,20 @@ def appeals_distributed send_data csv_data, filename: filename end + def appeals_in_location_63_in_past_2_days + # change this to the correct class + csv_data = AppealsInLocation63InPast2Days.process + + # Get the current date and time for dynamic filename + current_datetime = Time.zone.now.strftime("%Y%m%d-%H%M") + + # Set dynamic filename with current date and time + filename = "appeals_in_location_63_past_2_days_#{current_datetime}.csv" + + # Send CSV as a response with dynamic filename + send_data csv_data, filename: filename + end + def ineligible_judge_list # change this to the correct class csv_data = IneligibleJudgeList.process @@ -94,6 +127,32 @@ def ineligible_judge_list send_data csv_data, filename: filename end + def appeals_tied_to_non_ssc_avlj + csv_data = AppealsTiedToNonSscAvljQuery.process + + # Get the current date and time for dynamic filename + current_datetime = Time.zone.now.strftime("%Y%m%d-%H%M") + + # Set dynamic filename with current date and time + filename = "appeals_tied_to_non_ssc_avljs_#{current_datetime}.csv" + + # Send CSV as a response with dynamic filename + send_data csv_data, filename: filename + end + + def appeals_tied_to_avljs_and_vljs + csv_data = AppealsTiedToAvljsAndVljsQuery.process + + # Get the current date and time for dynamic filename + current_datetime = 
Time.zone.now.strftime("%Y%m%d-%H%M") + + # Set dynamic filename with current date and time + filename = "appeals_tied_to_avljs_and_vljs#{current_datetime}.csv" + + # Send CSV as a response with dynamic filename + send_data csv_data, filename: filename + end + private def check_environment diff --git a/app/controllers/concerns/track_request_id.rb b/app/controllers/concerns/track_request_id.rb deleted file mode 100644 index 188256c88f0..00000000000 --- a/app/controllers/concerns/track_request_id.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module TrackRequestId - extend ActiveSupport::Concern - - included do - before_action :track_request_id - end - - def track_request_id - ::NewRelic::Agent.add_custom_attributes(request_id: request.uuid) - end -end diff --git a/app/controllers/dependencies_checks_controller.rb b/app/controllers/dependencies_checks_controller.rb index ee42e93b5c8..a39f57a4be0 100644 --- a/app/controllers/dependencies_checks_controller.rb +++ b/app/controllers/dependencies_checks_controller.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true class DependenciesChecksController < ApplicationBaseController - newrelic_ignore_apdex - skip_before_action :check_out_of_service def show diff --git a/app/controllers/health_checks_controller.rb b/app/controllers/health_checks_controller.rb index 4ee3acc4440..4451296b081 100644 --- a/app/controllers/health_checks_controller.rb +++ b/app/controllers/health_checks_controller.rb @@ -2,11 +2,9 @@ # rubocop:disable Rails/ApplicationController class HealthChecksController < ActionController::Base - include TrackRequestId include CollectCustomMetrics protect_from_forgery with: :exception - newrelic_ignore_apdex def show body = { diff --git a/app/helpers/sync_decided_appeals_helper.rb b/app/helpers/sync_decided_appeals_helper.rb new file mode 100644 index 00000000000..45adcc0e36e --- /dev/null +++ b/app/helpers/sync_decided_appeals_helper.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +## +# Helper to sync the decided appeals and their decision_mailed status + +module SyncDecidedAppealsHelper + VACOLS_BATCH_PROCESS_LIMIT = ENV["VACOLS_QUERY_BATCH_SIZE"] || 800 + + # Syncs the decision_mailed status of Legacy Appeals with a decision made + def sync_decided_appeals + begin + # Join query to retrieve Legacy AppealState ids and corresponding vacols_id + appeal_state_ids = AppealState.legacy.where(decision_mailed: false) + .joins(:legacy_appeal).preload(:legacy_appeal) + .pluck(:id, :vacols_id) + + appeal_state_ids_hash = appeal_state_ids.to_h + + vacols_decision_dates = get_decision_dates(appeal_state_ids_hash.values).to_h + + ActiveSupport::Dependencies.interlock.permit_concurrent_loads do + Parallel.each(appeal_state_ids_hash, in_threads: 4) do |appeal_state_hash| + appeal_state_id = appeal_state_hash[0] + vacols_id = appeal_state_hash[1] + # If there is a decision date on the VACOLS record, + # update the decision_mailed status on the AppealState to true + if vacols_decision_dates[vacols_id].present? + AppealState.find(appeal_state_id).decision_mailed_appeal_state_update_action! 
+ end + end + end + rescue StandardError => error + Rails.logger.error("#{error.class}: #{error.message}\n#{error.backtrace}") + + # Re-raising the error so it can be caught in the NightlySyncsJob report + raise error + end + end + + # Method to retrieve the decision dates from VACOLS in batches + # params: vacols_ids + # Returns: Hash containing the key, value pair of vacols_id, decision_date + def get_decision_dates(vacols_ids) + begin + decision_dates = {} + + # Query VACOLS in batches + vacols_ids.in_groups_of(VACOLS_BATCH_PROCESS_LIMIT.to_i) do |vacols_id| + VACOLS::Case.where(bfkey: vacols_id).each do |vacols_record| + decision_dates[vacols_record[:bfkey]] = vacols_record[:bfddec] + end + end + + decision_dates + rescue ActiveRecord::RecordNotFound + [] + end + end + + def get_vacols_ids(legacy_appeal_states) + begin + vacols_ids = {} + + legacy_appeal_states.each do |appeal_state| + legacy_appeal = LegacyAppeal.find(appeal_state.appeal_id) + + # Find the VACOLS record associated with the LegacyAppeal + vacols_ids << { appeal_state.id.to_s => (legacy_appeal[:vacols_id]).to_s } + end + + vacols_ids + rescue ActiveRecord::RecordNotFound + {} + end + end +end diff --git a/app/jobs/caseflow_job.rb b/app/jobs/caseflow_job.rb index b9717f403b0..c03e375a05c 100644 --- a/app/jobs/caseflow_job.rb +++ b/app/jobs/caseflow_job.rb @@ -7,7 +7,6 @@ class CaseflowJob < ApplicationJob job.start_time = Time.zone.now end - # Automatically report runtime to DataDog if job does not explicitly report to DataDog. # Note: This block is not called if an error occurs when `perform` is executed -- # see https://stackoverflow.com/questions/50263787/does-active-job-call-after-perform-when-perform-raises-an-error after_perform do |job| diff --git a/app/jobs/nightly_syncs_job.rb b/app/jobs/nightly_syncs_job.rb index ef835251d04..664a1fa0ce3 100644 --- a/app/jobs/nightly_syncs_job.rb +++ b/app/jobs/nightly_syncs_job.rb @@ -6,6 +6,7 @@ class NightlySyncsJob < CaseflowJob queue_with_priority :low_priority application_attr :queue # arbitrary + include SyncDecidedAppealsHelper def perform RequestStore.store[:current_user] = User.system_user @@ -16,6 +17,7 @@ def perform sync_vacols_users sync_decision_review_tasks sync_bgs_attorneys + sync_all_decided_appeals slack_service.send_notification(@slack_report.join("\n"), self.class.name) if @slack_report.any? end @@ -84,6 +86,14 @@ def sync_bgs_attorneys @slack_report << "*Fatal error in sync_bgs_attorneys:* #{error}" end + def sync_all_decided_appeals + begin + sync_decided_appeals + rescue StandardError => error + @slack_report << "*Fatal error in sync_decided_appeals* #{error}" + end + end + def dangling_legacy_appeals reporter = LegacyAppealsWithNoVacolsCase.new reporter.call @@ -105,5 +115,7 @@ def sync_hearing_states state.scheduled_in_error_appeal_state_update_action! 
end end + rescue StandardError => error + @slack_report << "*Fatal error in sync_hearing_states* #{error}" end end diff --git a/app/jobs/process_decision_document_job.rb b/app/jobs/process_decision_document_job.rb index e2d0e9f0cee..8f392bab152 100644 --- a/app/jobs/process_decision_document_job.rb +++ b/app/jobs/process_decision_document_job.rb @@ -4,10 +4,10 @@ class ProcessDecisionDocumentJob < CaseflowJob queue_with_priority :low_priority application_attr :intake - def perform(decision_document_id, mail_package = nil) + def perform(decision_document_id, contested, mail_package = nil) RequestStore.store[:application] = "idt" RequestStore.store[:current_user] = User.system_user - DecisionDocument.find(decision_document_id).process!(mail_package) + DecisionDocument.find(decision_document_id).process!(contested, mail_package) end end diff --git a/app/jobs/process_notification_status_updates_job.rb b/app/jobs/process_notification_status_updates_job.rb index 32bb0e443cb..7c8571c8c6a 100644 --- a/app/jobs/process_notification_status_updates_job.rb +++ b/app/jobs/process_notification_status_updates_job.rb @@ -1,45 +1,204 @@ # frozen_string_literal: true +# rubocop:disable Layout/LineLength +# A job that pulls messages from the 'receive_notifications' FIFO SQS queue +# that represent status updates for VA Notify notifications and persists +# the information in our notifications table. +# +# The messages are queued by {Api::V1::VaNotifyController#notifications_update} which is +# an endpoint where VA Notify sends information to us about notifications we've requested +# that they send via their +# {https://github.com/department-of-veterans-affairs/notification-api/blob/1b758dddf2d2c12d73415e4ee508cf6b0e101343/app/celery/service_callback_tasks.py#L29 send_delivery_status_to_service} callback. +# +# This information includes: +# - The latest status pertaining to the notification's delivery (ex: success or temporary-failure) +# - The status reason (extra context around the status, if available) +# - The recipient's email or phone number +# - Caseflow simply provides VA Notify with the intended recipient's participant ID with each initial notification request, and it does not know of the destination of a message until they inform us. +# +# @see https://github.com/department-of-veterans-affairs/caseflow/wiki/VA-Notify +# @see https://github.com/department-of-veterans-affairs/caseflow/wiki/Status-Webhook-API +# rubocop:enable Layout/LineLength class ProcessNotificationStatusUpdatesJob < CaseflowJob + include Hearings::EnsureCurrentUserIsSet + queue_with_priority :low_priority + MESSAGE_GROUP_ID = "VANotifyStatusUpdate" # Used to only process messages queued by the status update webhook + PROCESSING_LIMIT = 5000 # How many updates to perform per job execution + + # Consumes messages from the 'receive_notifications' FIFO SQS queue whose 'MessageGroupId' + # attribute matches MESSAGE_GROUP_ID, and then persists data contained within those messages + # about VA Notify notifications to our 'notifications' table. 
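For reference, each message this job consumes carries the JSON body assembled by Api::V1::VaNotifyController#build_message_body earlier in this diff, so a single status update parses into a hash shaped roughly like the one below (values are illustrative only):

{
  "external_id"       => "0abc1234-5678-90ab-cdef-1234567890ab", # VA Notify's id (params[:id] from the webhook)
  "notification_type" => "email",                                # "email" or "sms"
  "recipient"         => "veteran@example.com",                  # email address or phone number reported by VA Notify
  "status"            => "delivered",
  "status_reason"     => nil
}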
def perform - RequestStore[:current_user] = User.system_user + ensure_current_user_is_set - redis = Redis.new(url: Rails.application.secrets.redis_url_cache) + begin + number_of_messages_processed = 0 - processed_count = 0 + number_of_messages_processed += process_batch_of_messages while number_of_messages_processed < PROCESSING_LIMIT + rescue Caseflow::Error::SqsQueueExhaustionError + Rails.logger.info("ProcessNotificationStatusUpdatesJob is exiting early due to the queue being empty.") + rescue StandardError => error + log_error(error) + raise error + ensure + Rails.logger.info("#{number_of_messages_processed} messages have been processed by this execution.") + end + end - # prefer scan so we only load a single record into memory, - # dumping the whole list could cause performance issues when job runs - redis.scan_each(match: "*_update:*") do |key| - break if processed_count >= 1000 + private - begin - raw_notification_type, uuid, status = key.split(":") + # Returns the SQS URL of the 'receive_notifications' FIFO SQS queue for the + # current environment using a substring. + # + # @return [String] + # The URL of the queue that messages will be pulled from. + def recv_queue_url + @recv_queue_url ||= SqsService.find_queue_url_by_name(name: "receive_notifications", check_fifo: true) + end - notification_type = extract_notification_type(raw_notification_type) + # Pulls in up to 10 messages from the 'receive_notifications' FIFO SQS queue + # and consume the data in order to persist VA Notify status updates to the + # the notifications table. + # + # @see https://github.com/department-of-veterans-affairs/caseflow/blob/master/app/controllers/api/v1/va_notify_controller.rb + # + # @return [Integer] + # The number of messages that were attempted to be processed in a batch. + def process_batch_of_messages + response = SqsService.sqs_client.receive_message( + { + queue_url: recv_queue_url, + max_number_of_messages: 10, + attribute_names: ["MessageGroupId"] + } + ) - fail InvalidNotificationStatusFormat if [notification_type, uuid, status].any?(&:nil?) + # Exit loop early if there does not seem to be any more messages. + fail Caseflow::Error::SqsQueueExhaustionError if response.messages.empty? - rows_updated = Notification.select(Arel.star).where( - Notification.arel_table["#{notification_type}_notification_external_id".to_sym].eq(uuid) - ).update_all("#{notification_type}_notification_status" => status) + filtered_messages = filter_messages_by_group_id(response.messages) - fail StandardError, "No notification matches UUID #{uuid}" if rows_updated.zero? - rescue StandardError => error - log_error(error) - ensure - # cleanup keys - do first so we don't reporcess any failed keys - redis.del key - processed_count += 1 - end - end + batch_status_updates(filtered_messages) + SqsService.batch_delete_messages(queue_url: recv_queue_url, messages: filtered_messages) + + # Return the number of messages attempted to be processed + filtered_messages.size end - private + # Sorts pending status update messages by notification type and performs up to two + # separate UPDATE queries to persist data to the corresponding notifications + # table records. + # + # @param messages [Array] A collection of AWS SQS messages. + # + # @return [Boolean] + # True/False depending on if the final totals could be logged. 
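With the localstack service added to the CI workflow at the top of this diff, the receive_notifications FIFO queue can be exercised without touching AWS. Below is a rough sketch of pushing a test status update onto the queue; the endpoint, region, credentials, and queue name are assumptions for a local setup, while the message group id and SHA-256 deduplication id mirror what the controller sends:

require "aws-sdk-sqs"
require "digest"
require "json"

sqs = Aws::SQS::Client.new(
  endpoint: "http://localhost:4566",                        # localstack, per the workflow service above
  region: "us-east-1",                                       # assumed
  credentials: Aws::Credentials.new("test", "test")          # assumed localstack credentials
)

body = {
  external_id: "0abc1234", notification_type: "email",
  recipient: "veteran@example.com", status: "delivered", status_reason: nil
}.to_json

queue_url = sqs.get_queue_url(queue_name: "caseflow_development_receive_notifications.fifo").queue_url # assumed name

sqs.send_message(
  queue_url: queue_url,
  message_body: body,
  message_group_id: "VANotifyStatusUpdate",                   # must match MESSAGE_GROUP_ID or the job filters it out
  message_deduplication_id: Digest::SHA256.hexdigest(body)    # FIFO queues require a deduplication id
)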
+ def batch_status_updates(messages) + parsed_bodies = messages.map { |msg| JSON.parse(msg.body) } + + email_rows_update_count = update_email_statuses(filter_body_by_notification_type(parsed_bodies, "email")) + sms_rows_update_count = update_sms_statuses(filter_body_by_notification_type(parsed_bodies, "sms")) + + Rails.logger.info( + "Email statuses updated: #{email_rows_update_count} - SMS statuses updated: #{sms_rows_update_count}" + ) + end + + # Filters messages bodies by notification_type. + # + # @param bodies [Array>] A collection of the bodies of messages that have been + # parsed into hashes. + # @param notification_type [String] The type of notification to filter for. 'email' and 'sms' + # are the two valid types at the time of writing this comment. + # + # @return [Array>] + # Messages bodies whose notification_type matches the desired one. + def filter_body_by_notification_type(bodies, notification_type) + bodies.filter { _1["notification_type"] == notification_type } + end + + # Performs updates to any email notifications in the current batch of messages + # being processed. Statuses, status reasons, and recipient informations are items that are updated. + # + # @param status_update_list [Array>] A collection of the bodies of messages that have been + # parsed into hashes. These represent VA Notify status updates. + # + # @return [Integer] + # The number of rows that have been updated. + def update_email_statuses(status_update_list) + return 0 if status_update_list.empty? + + query = <<-SQL + UPDATE notifications AS n SET + email_notification_status = new.n_status, + recipient_email = new.recipient, + email_status_reason = new.status_reason + FROM ( VALUES + #{build_values_mapping(status_update_list)} + ) AS new(external_id, n_status, status_reason, recipient) + WHERE new.external_id = n.email_notification_external_id + SQL + + ActiveRecord::Base.connection.update(query) + end + + # Performs updates to any SMS notifications in the current batch of messages + # being processed. Statuses, status reasons, and recipient informations are items that are updated. + # + # @param status_update_list [Array>] A collection of the bodies of messages that have been + # parsed into hashes. These represent VA Notify status updates. + # + # @return [Integer] + # The number of rows that have been updated. + def update_sms_statuses(status_update_list) + return 0 if status_update_list.empty? + + query = <<-SQL + UPDATE notifications AS n SET + sms_notification_status = new.n_status, + recipient_phone_number = new.recipient, + sms_status_reason = new.status_reason + FROM ( VALUES + #{build_values_mapping(status_update_list)} + ) AS new(external_id, n_status, status_reason, recipient) + WHERE new.external_id = n.sms_notification_external_id + SQL + + ActiveRecord::Base.connection.update(query) + end + + # Builds a comma-delimited list of VALUES expressions to represent the data to be used + # in updated notification statuses, status reasons, and recipient information. + # + # @param status_update_list [Array>] A collection of the bodies of messages that have been + # parsed into hashes. These represent VA Notify status updates. + # + # @return [String] + # A sanitized SQL string consisting of VALUE expressions. 
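To make the VALUES-list update concrete: for a two-message email batch, build_values_mapping renders the tuples shown below, and update_email_statuses splices them into a single UPDATE joined on email_notification_external_id. The data is illustrative, and send is used only because the method is private:

job = ProcessNotificationStatusUpdatesJob.new
job.send(:build_values_mapping, [
  { "external_id" => "0abc1234", "status" => "delivered",
    "status_reason" => nil, "recipient" => "veteran@example.com" },
  { "external_id" => "0def5678", "status" => "temporary-failure",
    "status_reason" => "Mailbox full", "recipient" => "other@example.com" }
])
# => "('0abc1234', 'delivered', '', 'veteran@example.com'),('0def5678', 'temporary-failure', 'Mailbox full', 'other@example.com')"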
+ def build_values_mapping(status_update_list) + values = status_update_list.map do |status_update| + external_id = status_update["external_id"] + status = status_update["status"] + status_reason = status_update["status_reason"] + recipient = status_update["recipient"] + + "('#{external_id}', '#{status}', '#{status_reason}', '#{recipient}')" + end + + ActiveRecord::Base.sanitize_sql(values.join(",")) + end - def extract_notification_type(raw_notification_type) - raw_notification_type.split("_").first + # Filters out SQS messages whose MessageGroupId isn't the one utilized by our VA Notify webhooks + # so that they're not accidentally processed. + # + # @param messages [Array] A collection of messages to be filtered. + # + # @return [Array] + # Messages whose MessageGroupId matches the one this job expect. Messages with + # a different MessageGroupId will be ignored. + def filter_messages_by_group_id(messages) + messages.filter { _1.attributes["MessageGroupId"] == MESSAGE_GROUP_ID } end end diff --git a/app/jobs/push_priority_appeals_to_judges_job.rb b/app/jobs/push_priority_appeals_to_judges_job.rb index 7907a9eca08..8a1e8cfff68 100644 --- a/app/jobs/push_priority_appeals_to_judges_job.rb +++ b/app/jobs/push_priority_appeals_to_judges_job.rb @@ -20,6 +20,7 @@ def perform @genpop_distributions = distribute_genpop_priority_appeals perform_later_or_now(UpdateAppealAffinityDatesJob) + perform_later_or_now(ReturnLegacyAppealsToBoardJob) slack_service.send_notification(generate_report.join("\n"), self.class.name) rescue StandardError => error diff --git a/app/jobs/receive_notification_job.rb b/app/jobs/receive_notification_job.rb deleted file mode 100644 index 7294ce043af..00000000000 --- a/app/jobs/receive_notification_job.rb +++ /dev/null @@ -1,73 +0,0 @@ -# frozen_string_literal: true - -class ReceiveNotificationJob < CaseflowJob - queue_as ApplicationController.dependencies_faked? ? :receive_notifications : :"receive_notifications.fifo" - application_attr :hearing_schedule - - def perform(message) - if !message.nil? - message_attributes = message[:message_attributes] - if !message_attributes.nil? - # load reference value to obtain notification id for record lookup - notification_id = message_attributes[:reference][:string_value] - - # load intersecting fields that may change in our database - email_address = message_attributes[:email_address][:string_value] - phone_number = message_attributes[:phone_number][:string_value] - status = message_attributes[:status][:string_value] - type = message_attributes[:type][:string_value] - - # load record - audit_record = Notification.find_by(id: notification_id) - - compare_notification_audit_record(audit_record, email_address, phone_number, status, type) - - else - log_error("message_attributes was nil on the ReceiveNotificationListenerJob message. Exiting Job.") - end - else - log_error("There was no message passed into the ReceiveNotificationListener. 
Exiting job.") - end - end - - private - - # Purpose: Method to be called with an error need to be logged to the rails logger - # - # Params: error_message (Expecting a string) - Message to be logged to the logger - # - # Response: None - def log_error(error_message) - Rails.logger.error(error_message) - end - - # Purpose: Method to compare audit record from database with record in message - # - # Params: - # - audit_record - audit record to compare with message - # - email_address - email of recipient - # - phone_number = phone number of recipient - # - status - status of notification - # - type - sms or email, used to update email/text notification status - # - # Returns: Updated model from update_audit_record - def compare_notification_audit_record(audit_record, email_address, phone_number, status, type) # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity - status = status.capitalize - - if !email_address.nil? && audit_record.recipient_email != email_address - audit_record.update!(recipient_email: email_address) - end - - if !phone_number.nil? && audit_record.recipient_phone_number != phone_number - audit_record.update!(recipient_phone_number: phone_number) - end - - if type == "email" && !status.nil? && status != audit_record.email_notification_status - audit_record.update!(email_notification_status: status) - elsif type == "sms" && !status.nil? && status != audit_record.sms_notification_status - audit_record.update!(sms_notification_status: status) - end - - audit_record - end -end diff --git a/app/jobs/return_legacy_appeals_to_board_job.rb b/app/jobs/return_legacy_appeals_to_board_job.rb new file mode 100644 index 00000000000..a5ee1baba90 --- /dev/null +++ b/app/jobs/return_legacy_appeals_to_board_job.rb @@ -0,0 +1,252 @@ +# frozen_string_literal: true + +class ReturnLegacyAppealsToBoardJob < CaseflowJob + # For time_ago_in_words() + include ActionView::Helpers::DateHelper + + queue_with_priority :low_priority + application_attr :queue + + NO_RECORDS_FOUND_MESSAGE = [Constants.DISTRIBUTION.no_records_moved_message].freeze + + def perform + catch(:abort) do + begin + returned_appeal_job = create_returned_appeal_job + + appeals, moved_appeals = eligible_and_moved_appeals + + check_appeals_available(moved_appeals, returned_appeal_job) + + complete_returned_appeal_job(returned_appeal_job, "Job completed successfully", moved_appeals) + + # The rest of your code continues here + # Filter the appeals and send the filtered report + @filtered_appeals = filter_appeals(appeals, moved_appeals) + send_job_slack_report(slack_report) + rescue StandardError => error + handle_error(error, returned_appeal_job) + ensure + metrics_service_report_runtime(metric_group_name: "return_legacy_appeals_to_board_job") + end + end + end + + def filter_appeals(appeals, moved_appeals) + priority_appeals_moved, non_priority_appeals_moved = separate_by_priority(moved_appeals) + + remaining_priority_appeals, + remaining_non_priority_appeals = calculate_remaining_appeals( + appeals, + priority_appeals_moved, + non_priority_appeals_moved + ) + + { + priority_appeals_count: count_unique_bfkeys(priority_appeals_moved), + non_priority_appeals_count: count_unique_bfkeys(non_priority_appeals_moved), + remaining_priority_appeals_count: count_unique_bfkeys(remaining_priority_appeals), + remaining_non_priority_appeals_count: count_unique_bfkeys(remaining_non_priority_appeals), + grouped_by_avlj: grouped_by_avlj(moved_appeals) + } + end + + def eligible_and_moved_appeals + appeals = 
LegacyDocket.new.appeals_tied_to_non_ssc_avljs + moved_appeals = move_qualifying_appeals(appeals) + [appeals, moved_appeals] + end + + def grouped_by_avlj(moved_appeals) + return [] if moved_appeals.nil? + + moved_appeals.group_by { |appeal| VACOLS::Staff.find_by(sattyid: appeal["vlj"])&.sattyid }.keys.compact + end + + def count_unique_bfkeys(appeals) + appeals.map { |appeal| appeal["bfkey"] }.uniq.size + end + + private + + def move_qualifying_appeals(appeals) + qualifying_appeals_bfkeys = [] + + non_ssc_avljs.each do |non_ssc_avlj| + tied_appeals = appeals.select { |appeal| appeal["vlj"] == non_ssc_avlj.sattyid } + tied_appeals_bfkeys = get_tied_appeal_bfkeys(tied_appeals) + qualifying_appeals_bfkeys = update_qualifying_appeals_bfkeys(tied_appeals_bfkeys, qualifying_appeals_bfkeys) + end + + unless qualifying_appeals_bfkeys.empty? + qualifying_appeals = appeals + .select { |q_appeal| qualifying_appeals_bfkeys.include? q_appeal["bfkey"] } + .flatten + .sort_by { |appeal| [-appeal["priority"], appeal["bfd19"]] } + VACOLS::Case.batch_update_vacols_location("63", qualifying_appeals.map { |q_appeal| q_appeal["bfkey"] }) + end + + qualifying_appeals || [] + end + + def get_tied_appeal_bfkeys(tied_appeals) + tied_appeals_bfkeys = [] + + unless tied_appeals.empty? + tied_appeals_bfkeys = tied_appeals + .sort_by { |t_appeal| [-t_appeal["priority"], t_appeal["bfd19"]] } + .map { |t_appeal| t_appeal["bfkey"] } + .uniq + .flatten + end + + tied_appeals_bfkeys + end + + def update_qualifying_appeals_bfkeys(tied_appeals_bfkeys, qualifying_appeals_bfkeys) + if nonsscavlj_number_of_appeals_limit < 0 + fail StandardError, "CaseDistributionLever.nonsscavlj_number_of_appeals_to_move set below 0" + elsif nonsscavlj_number_of_appeals_limit == 0 + return qualifying_appeals_bfkeys + end + + if tied_appeals_bfkeys.any? + if tied_appeals_bfkeys.count < nonsscavlj_number_of_appeals_limit + qualifying_appeals_bfkeys.push(tied_appeals_bfkeys) + else + qualifying_appeals_bfkeys.push(tied_appeals_bfkeys[0..nonsscavlj_number_of_appeals_to_move_index]) + end + end + + qualifying_appeals_bfkeys.flatten + end + + def non_ssc_avljs + VACOLS::Staff.where("sactive = 'A' AND svlj = 'A' AND sattyid <> smemgrp") + end + + # Method to separate appeals by priority + def separate_by_priority(appeals) + return [] if appeals.nil? + + priority_appeals = appeals.select { |appeal| appeal["priority"] == 1 } || [] + non_priority_appeals = appeals.select { |appeal| appeal["priority"] == 0 } || [] + + [priority_appeals, non_priority_appeals] + end + + # Method to calculate remaining eligible appeals + def calculate_remaining_appeals(all_appeals, moved_priority_appeals, moved_non_priority_appeals) + return [] if all_appeals.nil? + + remaining_priority_appeals = calculate_remaining_priority_appeals(all_appeals, moved_priority_appeals) + remaining_non_priority_appeals = calculate_remaining_non_priority_appeals(all_appeals, moved_non_priority_appeals) + + [remaining_priority_appeals, remaining_non_priority_appeals] + end + + def calculate_remaining_priority_appeals(all_appeals, moved_priority_appeals) + starting_priority_appeals = all_appeals.select { |appeal| appeal["priority"] == 1 } + + if (moved_priority_appeals - starting_priority_appeals).empty? 
+ remaining_priority_appeals = (starting_priority_appeals - moved_priority_appeals) || [] + else + fail StandardError, "An invalid priority appeal was detected in the list of moved appeals: "\ + "#{moved_priority_appeals - starting_priority_appeals}" + end + + remaining_priority_appeals + end + + def calculate_remaining_non_priority_appeals(all_appeals, moved_non_priority_appeals) + starting_non_priority_appeals = all_appeals.select { |appeal| appeal["priority"] == 0 } + + if (moved_non_priority_appeals - starting_non_priority_appeals).empty? + remaining_non_priority_appeals = (starting_non_priority_appeals - moved_non_priority_appeals) || [] + else + fail StandardError, "An invalid non-priority appeal was detected in the list of moved appeals: "\ + "#{moved_non_priority_appeals - starting_non_priority_appeals}" + end + + remaining_non_priority_appeals + end + + # Method to fetch non-SSC AVLJs SATTYIDS that appeals were moved to location '63' + def fetch_moved_sattyids(moved_appeals) + return [] if moved_appeals.nil? + + moved_appeals.map { |appeal| VACOLS::Staff.find_by(sattyid: appeal["vlj"]) } + .compact + .uniq + .map(&:sattyid) || [] + end + + def nonsscavlj_number_of_appeals_limit + @nonsscavlj_number_of_appeals_limit ||= CaseDistributionLever.nonsscavlj_number_of_appeals_to_move || 0 + end + + def nonsscavlj_number_of_appeals_to_move_index + @nonsscavlj_number_of_appeals_to_move_index ||= nonsscavlj_number_of_appeals_limit - 1 + end + + def create_returned_appeal_job + ReturnedAppealJob.create!( + started_at: Time.zone.now, + stats: { message: "Job started" }.to_json + ) + end + + def check_appeals_available(moved_appeals, returned_appeal_job) + if moved_appeals.nil? + complete_returned_appeal_job(returned_appeal_job, Constants.DISTRIBUTION.no_records_moved_message, []) + send_job_slack_report(NO_RECORDS_FOUND_MESSAGE) + throw(:abort) + end + end + + def handle_error(error, returned_appeal_job) + @start_time ||= Time.zone.now + message = "Job failed with error: #{error.message}" + errored_returned_appeal_job(returned_appeal_job, message) + duration = time_ago_in_words(@start_time) + slack_service.send_notification("\n [ERROR] after running for #{duration}: #{error.message}", + self.class.name) + log_error(error) + message + end + + def complete_returned_appeal_job(returned_appeal_job, message, appeals) + appeals ||= [] + returned_appeal_job.update!( + completed_at: Time.zone.now, + stats: { message: message }.to_json, + returned_appeals: appeals.map { |appeal| appeal["bfkey"] }.uniq + ) + end + + def errored_returned_appeal_job(returned_appeal_job, message) + returned_appeal_job.update!( + errored_at: Time.zone.now, + stats: { message: message }.to_json + ) + end + + def send_job_slack_report(slack_message) + if slack_message.blank? 
+ fail StandardError, "Slack message cannot be empty or nil" + end + + slack_service.send_notification(slack_message.join("\n"), self.class.name) + end + + def slack_report + report = [] + report << "Job performed successfully" + report << "Total Priority Appeals Moved: #{@filtered_appeals[:priority_appeals_count]}" + report << "Total Non-Priority Appeals Moved: #{@filtered_appeals[:non_priority_appeals_count]}" + report << "Total Remaining Priority Appeals: #{@filtered_appeals[:remaining_priority_appeals_count]}" + report << "Total Remaining Non-Priority Appeals: #{@filtered_appeals[:remaining_non_priority_appeals_count]}" + report << "SATTYIDs of Non-SSC AVLJs Moved: #{@filtered_appeals[:grouped_by_avlj].join(', ')}" + report + end +end diff --git a/app/jobs/send_notification_job.rb b/app/jobs/send_notification_job.rb index 4438014e8ec..9bce677e847 100644 --- a/app/jobs/send_notification_job.rb +++ b/app/jobs/send_notification_job.rb @@ -43,12 +43,11 @@ class SendNotificationJobError < StandardError; end class << self def queue_name_suffix - ApplicationController.dependencies_faked? ? :send_notifications : :"send_notifications.fifo" + :"send_notifications.fifo" end end # Must receive JSON string as argument - def perform(message_json) ensure_current_user_is_set @@ -57,18 +56,12 @@ def perform(message_json) @message = validate_message(JSON.parse(message_json, object_class: OpenStruct)) - transaction_wrapper do + ActiveRecord::Base.transaction do @notification_audit = find_or_create_notification_audit update_notification_statuses send_to_va_notify if message_status_valid? end rescue StandardError => error - if Rails.deploy_env?(:prodtest) && error.in?(DISCARD_ERRORS) - transaction_wrapper do - @notification_audit = find_or_create_notification_audit - end - end - log_error(error) raise error end @@ -76,19 +69,6 @@ def perform(message_json) private - # Conditionally wraps database operations in a transaction block depending on whether - # the current environment is ProdTest. The choice to not have ProdTest queries utilize - # a transction is due to how unlikely it will be for us to have an operation VA Notify - # integration in that environment due to this environment having production-replicated - # data and us not wanting to inadvertently transmit messages to actual recipients. - # - # The lack of a transaction block will prevent rollbacks on the records created in the - # notifications table and allow for observations around notification accuracy to be - # more easily obtained. - def transaction_wrapper - ActiveRecord::Base.transaction { yield } - end - def event_type message.template_name end diff --git a/app/jobs/sync_reviews_job.rb b/app/jobs/sync_reviews_job.rb index e7be4761efb..92a26c5aa81 100644 --- a/app/jobs/sync_reviews_job.rb +++ b/app/jobs/sync_reviews_job.rb @@ -58,7 +58,7 @@ def perform_decision_rating_issues_syncs(limit) def reprocess_decision_documents(limit) DecisionDocument.requires_processing.limit(limit).each do |decision_document| - ProcessDecisionDocumentJob.perform_later(decision_document.id) + ProcessDecisionDocumentJob.perform_later(decision_document.id, decision_document.for_contested_claim?) 
end end end diff --git a/app/jobs/update_cached_appeals_attributes_job.rb b/app/jobs/update_cached_appeals_attributes_job.rb index 3bf31ad2eff..ad78ee9e564 100644 --- a/app/jobs/update_cached_appeals_attributes_job.rb +++ b/app/jobs/update_cached_appeals_attributes_job.rb @@ -124,9 +124,6 @@ def log_error(start_time, err) # We do not log every job failure since we expect the job to occasionally fail when we lose # database connections. Since this job runs regularly, we will continue to cache appeals and we # have set up alerts to notify us if we have cached too few appeals over the past day: - # * (Too little Postgres data cached) https://app.datadoghq.com/monitors/41421962 - # * (Too little VACOLS data cached) https://app.datadoghq.com/monitors/41234223 - # * (Job has not succeeded in the past day) https://app.datadoghq.com/monitors/41423568 record_error_in_metrics_service metrics_service_report_runtime(metric_group_name: METRIC_GROUP_NAME) diff --git a/app/jobs/va_notify_status_update_job.rb b/app/jobs/va_notify_status_update_job.rb deleted file mode 100644 index 0072204ab5d..00000000000 --- a/app/jobs/va_notify_status_update_job.rb +++ /dev/null @@ -1,187 +0,0 @@ -# frozen_string_literal: true - -class VANotifyStatusUpdateJob < CaseflowJob - queue_with_priority :low_priority - application_attr :hearing_schedule - - QUERY_LIMIT = ENV["VA_NOTIFY_STATUS_UPDATE_BATCH_LIMIT"] - VALID_NOTIFICATION_STATUSES = %w[Success temporary-failure technical-failure sending created].freeze - - # Description: Jobs main perform method that will find all notification records that do not have - # status updates from VA Notify and calls VA Notify API to get the latest status - # - # Params: None - # - # Retuns: None - def perform # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity - notifications_not_processed.each do |notification| - sms_external_id = notification.sms_notification_external_id - email_external_id = notification.email_notification_external_id - case notification.notification_type - when "Email" - if !email_external_id.nil? - update_attributes = get_current_status(email_external_id, "Email") - update_notification_audit_record(notification, update_attributes) - else - log_error("Notification Record " + notification.id.to_s + "With Email type does not have an external id.") - update_notification_audit_record(notification, "email_notification_status" => "No External Id") - end - when "SMS" - if !sms_external_id.nil? - update_attributes = get_current_status(sms_external_id, "SMS") - update_notification_audit_record(notification, update_attributes) - else - log_error("Notification Record " + notification.id.to_s + "With SMS type does not have an external id.") - update_notification_audit_record(notification, "sms_notification_status" => "No External Id") - end - when "Email and SMS" - if !email_external_id.nil? - update_attributes = get_current_status(email_external_id, "Email") - update_notification_audit_record(notification, update_attributes) - else - log_error("Notification Record " + notification.id.to_s + "With Email and SMS type does not have an \ - email external id.") - update_notification_audit_record(notification, "email_notification_status" => "No External Id") - end - if !sms_external_id.nil? 
- update_attributes = get_current_status(sms_external_id, "SMS") - update_notification_audit_record(notification, update_attributes) - else - log_error("Notification Record " + notification.id.to_s + "With Email and SMS type does not have a \ - SMS external id.") - update_notification_audit_record(notification, "sms_notification_status" => "No External Id") - end - end - notification.save! - end - end - - private - - # Description: Method that applies a query limit to the list of notification records that - # will get the status checked for. - # them from VA Notiufy - # - # Params: None - # - # Retuns: Lits of Notification records that has QUERY_LIMIT or less records - def notifications_not_processed - if !QUERY_LIMIT.nil? && QUERY_LIMIT.is_a?(String) - find_notifications_not_processed.first(QUERY_LIMIT.to_i) - else - log_info("VANotifyStatusJob can not read the VA_NOTIFY_STATUS_UPDATE_BATCH_LIMIT environment variable.\ - Defaulting to 650.") - find_notifications_not_processed.first(650) - end - end - - # Description: Method to query the Notification database for Notififcation - # records that have not been updated with a VA Notify Status - # - # Params: None - # - # Retuns: Lits of Notification Active Record associations meeting the where condition - def find_notifications_not_processed - Notification.select(Arel.star).where( - Arel::Nodes::Group.new( - email_status_check.or( - sms_status_check.or( - email_and_sms_status_check - ) - ) - ) - ) - .where(created_at: 4.days.ago..Time.zone.now) - .order(created_at: :desc) - end - - def email_status_check - Notification.arel_table[:notification_type].eq("Email").and( - generate_valid_status_check(:email_notification_status) - ) - end - - def sms_status_check - Notification.arel_table[:notification_type].eq("SMS").and( - generate_valid_status_check(:sms_notification_status) - ) - end - - def email_and_sms_status_check - Notification.arel_table[:notification_type].eq("Email and SMS").and( - generate_valid_status_check(:email_notification_status).or( - generate_valid_status_check(:sms_notification_status) - ) - ) - end - - def generate_valid_status_check(col_name_sym) - Notification.arel_table[col_name_sym].in(VALID_NOTIFICATION_STATUSES) - end - - # Description: Method to be called when an error message need to be logged - # - # Params: Error message to be logged - # - # Retuns: None - def log_error(message) - Rails.logger.error(message) - end - - # Description: Method to be called when an info message need to be logged - # - # Params: Info message to be logged - # - # Retuns: None - def log_info(message) - Rails.logger.info(message) - end - - # Description: Method that will get the VA Notify Status for the notification based on notification type - # - # - # Params: - # notification_id - The external id that VA Notify assigned to each notification. 
Can be for Email or SMS - # type - Type of notification to get status for - # values - Email, SMS or Email and SMS - # - # Retuns: Return a hash of attributes that need to be updated on the notification record - def get_current_status(notification_id, type) - begin - response = VANotifyService.get_status(notification_id) - if type == "Email" - { "email_notification_status" => response.body["status"], "recipient_email" => response.body["email_address"] } - elsif type == "SMS" - { "sms_notification_status" => response.body["status"], "recipient_phone_number" => - response.body["phone_number"] } - else - message = "Type neither email nor sms" - log_error("VA Notify API returned error for notificiation " + notification_id + " with type " + type) - Raven.capture_exception(type, extra: { error_uuid: error_uuid, message: message }) - end - rescue Caseflow::Error::VANotifyApiError => error - log_error( - "VA Notify API returned error for notification " + notification_id + " with error #{error}" - ) - Raven.capture_exception(error, extra: { error_uuid: error_uuid }) - nil - end - end - - # Description: Method that will update the notification record values - # - # Params: - # notification_audit_record - Notification Record to be updated - # to_update - Hash containing the column names and values to be updated - # - # Retuns: Lits of Notification records that has QUERY_LIMIT or less records - def update_notification_audit_record(notification_audit_record, to_update) - to_update&.each do |key, value| - notification_audit_record[key] = value - end - end -end - -def error_uuid - @error_uuid ||= SecureRandom.uuid -end diff --git a/app/jobs/virtual_hearings/create_conference_job.rb b/app/jobs/virtual_hearings/create_conference_job.rb index e88973d77c0..0c390164315 100644 --- a/app/jobs/virtual_hearings/create_conference_job.rb +++ b/app/jobs/virtual_hearings/create_conference_job.rb @@ -123,7 +123,7 @@ def log_virtual_hearing_state(virtual_hearing) Rails.logger.info("Establishment Updated At: (#{virtual_hearing.establishment.updated_at})") end - def create_conference_datadog_tags + def create_conference_tags custom_metric_info.merge(attrs: { hearing_id: virtual_hearing.hearing_id }) end @@ -149,12 +149,12 @@ def create_conference virtual_hearing.establishment.update_error!(error_display) - MetricsService.increment_counter(metric_name: "created_conference.failed", **create_conference_datadog_tags) + MetricsService.increment_counter(metric_name: "created_conference.failed", **create_conference_tags) fail pexip_response.error end - MetricsService.increment_counter(metric_name: "created_conference.successful", **create_conference_datadog_tags) + MetricsService.increment_counter(metric_name: "created_conference.successful", **create_conference_tags) virtual_hearing.update(conference_id: pexip_response.data[:conference_id]) end diff --git a/app/jobs/virtual_hearings/delete_conferences_job.rb b/app/jobs/virtual_hearings/delete_conferences_job.rb index fcc77df526f..9f713c64cf5 100644 --- a/app/jobs/virtual_hearings/delete_conferences_job.rb +++ b/app/jobs/virtual_hearings/delete_conferences_job.rb @@ -117,13 +117,13 @@ def count_deleted_and_log(enumerable) if removed > 0 MetricsService.increment_counter( - metric_name: "deleted_conferences.successful", by: removed, ** custom_metric_info + metric_name: "deleted_conferences.successful", ** custom_metric_info ) end if failed > 0 MetricsService.increment_counter( - metric_name: "deleted_conferences.failed", by: failed, ** custom_metric_info + metric_name: 
"deleted_conferences.failed", ** custom_metric_info ) end end diff --git a/app/models/appeal_state.rb b/app/models/appeal_state.rb index 6acd80c9c8f..c71700f39b0 100644 --- a/app/models/appeal_state.rb +++ b/app/models/appeal_state.rb @@ -443,9 +443,14 @@ def update_appeal_state_action!(status_to_update) if status_to_update == :appeal_cancelled existing_statuses.merge!({ privacy_act_complete: false, - privacy_act_pending: false + privacy_act_pending: false, + appeal_docketed: false }) end + + if status_to_update == :decision_mailed + existing_statuses[:appeal_docketed] = false + end end) end end diff --git a/app/models/case_distribution_lever.rb b/app/models/case_distribution_lever.rb index 54d9c59ae1e..a2ab54e3a9b 100644 --- a/app/models/case_distribution_lever.rb +++ b/app/models/case_distribution_lever.rb @@ -27,6 +27,7 @@ class CaseDistributionLever < ApplicationRecord #{Constants.DISTRIBUTION.ama_hearing_docket_time_goals} #{Constants.DISTRIBUTION.ama_hearing_start_distribution_prior_to_goals} #{Constants.DISTRIBUTION.ama_evidence_submission_start_distribution_prior_to_goals} + #{Constants.DISTRIBUTION.nonsscavlj_number_of_appeals_to_move} ).freeze FLOAT_LEVERS = %W( diff --git a/app/models/concerns/distribution_scopes.rb b/app/models/concerns/distribution_scopes.rb index 08fbfc2472d..81ea761fa56 100644 --- a/app/models/concerns/distribution_scopes.rb +++ b/app/models/concerns/distribution_scopes.rb @@ -299,6 +299,11 @@ def case_affinity_days_lever_value_is_selected?(lever_value) true end + def tied_to_judges(judge_ids) + with_appeal_affinities + .where(hearings: { judge_id: judge_ids }) + end + def exclude_affinity_and_ineligible_judge_ids judge_ids = JudgeTeam.judges_with_exclude_appeals_from_affinity diff --git a/app/models/decision_document.rb b/app/models/decision_document.rb index ec6496e8321..13d7a0ec93b 100644 --- a/app/models/decision_document.rb +++ b/app/models/decision_document.rb @@ -61,12 +61,16 @@ def submit_for_processing!(delay: processing_delay) super if not_processed_or_decision_date_not_in_the_future? - ProcessDecisionDocumentJob.perform_later(id, mail_package) + # Below we're grabbing the boolean value at this point in time. + # This will act as a point of truth that wont be affected by the + # async behavior of the outcode function due to triggering jobs. + + ProcessDecisionDocumentJob.perform_later(id, for_contested_claim?, mail_package) end end # rubocop:disable Metrics/CyclomaticComplexity - def process!(mail_package) + def process!(_contested, mail_package) return if processed? fail NotYetSubmitted unless submitted_and_ready? @@ -118,6 +122,15 @@ def all_contention_records(epe) contention_records(epe) end + def for_contested_claim? + case appeal_type + when "Appeal" + appeal.contested_claim? 
+ when "LegacyAppeal" + appeal.contested_claim + end + end + private attr_reader :mail_package diff --git a/app/models/docket.rb b/app/models/docket.rb index 73dc54358e6..b830e5fcf41 100644 --- a/app/models/docket.rb +++ b/app/models/docket.rb @@ -114,6 +114,12 @@ def ready_priority_appeal_ids appeals(priority: true, ready: true).pluck(:uuid) end + def tied_to_vljs(judge_ids) + docket_appeals.ready_for_distribution + .most_recent_hearings + .tied_to_judges(judge_ids) + end + # rubocop:disable Metrics/MethodLength, Lint/UnusedMethodArgument, Metrics/PerceivedComplexity # :reek:FeatureEnvy def distribute_appeals(distribution, priority: false, genpop: nil, limit: 1, style: "push") diff --git a/app/models/dockets/legacy_docket.rb b/app/models/dockets/legacy_docket.rb index 96c3eb30f87..44d332291f5 100644 --- a/app/models/dockets/legacy_docket.rb +++ b/app/models/dockets/legacy_docket.rb @@ -14,6 +14,18 @@ def ready_to_distribute_appeals LegacyAppeal.repository.ready_to_distribute_appeals end + def appeals_tied_to_non_ssc_avljs + LegacyAppeal.repository.appeals_tied_to_non_ssc_avljs + end + + def appeals_tied_to_avljs_and_vljs + LegacyAppeal.repository.appeals_tied_to_avljs_and_vljs + end + + def loc_63_appeals + LegacyAppeal.repository.loc_63_appeals + end + # rubocop:disable Metrics/CyclomaticComplexity def count(priority: nil, ready: nil) counts_by_priority_and_readiness.inject(0) do |sum, row| diff --git a/app/models/metric.rb b/app/models/metric.rb index 3e3f515bfa2..ecdf62164b4 100644 --- a/app/models/metric.rb +++ b/app/models/metric.rb @@ -7,7 +7,6 @@ class Metric < CaseflowRecord METRIC_TYPES = { error: "error", log: "log", performance: "performance", info: "info" }.freeze LOG_SYSTEMS = { dynatrace: "dynatrace", - datadog: "datadog", rails_console: "rails_console", javascript_console: "javascript_console" }.freeze diff --git a/app/models/prepend/va_notify/appeal_decision_mailed.rb b/app/models/prepend/va_notify/appeal_decision_mailed.rb index 708af3e4955..9618ed5af0d 100644 --- a/app/models/prepend/va_notify/appeal_decision_mailed.rb +++ b/app/models/prepend/va_notify/appeal_decision_mailed.rb @@ -12,16 +12,11 @@ module AppealDecisionMailed # Params: none # # Response: returns true if successfully processed, returns false if not successfully processed (will not notify) - def process!(mail_package = nil) + def process!(contested, mail_package = nil) super_return_value = super if processed? appeal.appeal_state.decision_mailed_appeal_state_update_action! - case appeal_type - when "Appeal" - template = appeal.contested_claim? ? CONTESTED_CLAIM : NON_CONTESTED_CLAIM - when "LegacyAppeal" - template = appeal.contested_claim ? CONTESTED_CLAIM : NON_CONTESTED_CLAIM - end + template = contested ? 
CONTESTED_CLAIM : NON_CONTESTED_CLAIM AppellantNotification.notify_appellant(appeal, template) end super_return_value diff --git a/app/models/prepend/va_notify/appellant_notification.rb b/app/models/prepend/va_notify/appellant_notification.rb index 77e4df6ec8b..e13f36dbaf6 100644 --- a/app/models/prepend/va_notify/appellant_notification.rb +++ b/app/models/prepend/va_notify/appellant_notification.rb @@ -23,10 +23,23 @@ def status end end + class InactiveAppealError < StandardError + def initialize(appeal_id, message = "The appeal status is inactive") + super(message + " for appeal with id #{appeal_id}") + end + + def status + "Inactive" + end + end + class NoAppealError < StandardError; end - def self.handle_errors(appeal) + def self.handle_errors(appeal, template_name) fail NoAppealError if appeal.nil? + if template_name == Constants.EVENT_TYPE_FILTERS.quarterly_notification && !appeal.active? + fail InactiveAppealError, appeal.external_id + end message_attributes = {} message_attributes[:appeal_type] = appeal.class.to_s @@ -68,7 +81,7 @@ def self.notify_appellant( end def self.create_payload(appeal, template_name, appeal_status = nil) - message_attributes = AppellantNotification.handle_errors(appeal) + message_attributes = AppellantNotification.handle_errors(appeal, template_name) VANotifySendMessageTemplate.new(message_attributes, template_name, appeal_status) end diff --git a/app/models/returned_appeal_job.rb b/app/models/returned_appeal_job.rb new file mode 100644 index 00000000000..90b3ce5aa15 --- /dev/null +++ b/app/models/returned_appeal_job.rb @@ -0,0 +1,4 @@ +# frozen_string_literal: true + +class ReturnedAppealJob < ApplicationRecord +end diff --git a/app/models/serializers/work_queue/appeal_search_serializer.rb b/app/models/serializers/work_queue/appeal_search_serializer.rb index 90be5a6f498..320c5bdb075 100644 --- a/app/models/serializers/work_queue/appeal_search_serializer.rb +++ b/app/models/serializers/work_queue/appeal_search_serializer.rb @@ -6,6 +6,16 @@ class WorkQueue::AppealSearchSerializer set_type :appeal + RESTRICTED_STATUSES = + [ + :distributed_to_judge, + :ready_for_signature, + :on_hold, + :misc, + :unknown, + :assigned_to_attorney + ].freeze + attribute :contested_claim, &:contested_claim? attribute :mst, &:mst? @@ -73,10 +83,11 @@ class WorkQueue::AppealSearchSerializer attribute :veteran_appellant_deceased, &:veteran_appellant_deceased? attribute :assigned_to_location do |object, params| - if object&.status&.status == :distributed_to_judge - if params[:user]&.judge? || params[:user]&.attorney? || User.list_hearing_coordinators.include?(params[:user]) + if RESTRICTED_STATUSES.include?(object&.status&.status) + unless params[:user]&.vso_employee? object.assigned_to_location end + # if not in a restricted status, show assigned location to all users else object.assigned_to_location end diff --git a/app/models/vacols/case_docket.rb b/app/models/vacols/case_docket.rb index cbaaed7960c..65c08422380 100644 --- a/app/models/vacols/case_docket.rb +++ b/app/models/vacols/case_docket.rb @@ -91,7 +91,8 @@ class DocketNumberCentennialLoop < StandardError; end " # Judges 000, 888, and 999 are not real judges, but rather VACOLS codes. 
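  # Illustrative sketch only (not part of this change): because the hearings join
  # defined below can emit one row per hearing for the same appeal, a caller that
  # needs one row per appeal could collapse duplicates by BFKEY, e.g.
  #
  #   rows = connection.exec_query(sanitize_sql_array([query])).to_a
  #   rows.uniq { |row| row["bfkey"] }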
- + # This query will create multiple records/rows for each BRIEFF if the BRIEFF has multiple hearings + # This may need to be accounted for by making sure the resultant set is filtered by BFKEY JOIN_ASSOCIATED_VLJS_BY_HEARINGS = " left join ( select distinct TITRNUM, TINUM, @@ -114,7 +115,6 @@ class DocketNumberCentennialLoop < StandardError; end F.TITRNUM as PREV_TITRNUM from BRIEFF B inner join FOLDER F on F.TICKNUM = B.BFKEY - where B.BFMPRO = 'HIS' and B.BFMEMID not in ('000', '888', '999') and B.BFATTID is not null ) PREV_APPEAL on PREV_APPEAL.PREV_BFKEY != BRIEFF.BFKEY and PREV_APPEAL.PREV_BFCORLID = BRIEFF.BFCORLID @@ -187,8 +187,28 @@ class DocketNumberCentennialLoop < StandardError; end ) " - # this query should not be used during distribution it is only intended for reporting usage + # selects both priority and non-priority appeals that are ready to distribute SELECT_READY_TO_DISTRIBUTE_APPEALS_ORDER_BY_BFD19 = " + select APPEALS.BFKEY, APPEALS.TINUM, APPEALS.BFD19, APPEALS.BFDLOOUT, + case when APPEALS.BFAC = '7' or APPEALS.AOD = 1 then 1 else 0 end PRIORITY, + APPEALS.VLJ, APPEALS.PREV_DECIDING_JUDGE, APPEALS.HEARING_DATE, APPEALS.PREV_BFDDEC + from ( + select BRIEFF.BFKEY, BRIEFF.TINUM, BFD19, BFDLOOUT, BFAC, AOD, + case when BFHINES is null or BFHINES <> 'GP' then VLJ_HEARINGS.VLJ end VLJ + , PREV_APPEAL.PREV_DECIDING_JUDGE PREV_DECIDING_JUDGE + , VLJ_HEARINGS.HEARING_DATE HEARING_DATE + , PREV_APPEAL.PREV_BFDDEC PREV_BFDDEC + from ( + #{SELECT_READY_APPEALS} + ) BRIEFF + #{JOIN_ASSOCIATED_VLJS_BY_HEARINGS} + #{JOIN_PREVIOUS_APPEALS} + order by BFD19 + ) APPEALS + " + + # this query should not be used during distribution it is only intended for reporting usage + SELECT_READY_TO_DISTRIBUTE_APPEALS_ORDER_BY_BFD19_ADDITIONAL_COLS = " select APPEALS.BFKEY, APPEALS.TINUM, APPEALS.BFD19, APPEALS.BFDLOOUT, APPEALS.AOD, APPEALS.BFCORLID, CORRES.SNAMEF, CORRES.SNAMEL, CORRES.SSN, STAFF.SNAMEF as VLJ_NAMEF, STAFF.SNAMEL as VLJ_NAMEL, @@ -210,7 +230,51 @@ class DocketNumberCentennialLoop < StandardError; end left join STAFF on APPEALS.VLJ = STAFF.SATTYID order by BFD19 " + + FROM_LOC_63_APPEALS = " + from BRIEFF + #{VACOLS::Case::JOIN_AOD} + inner join FOLDER on FOLDER.TICKNUM = BRIEFF.BFKEY + where BRIEFF.BFCURLOC in ('63') + and BRIEFF.BFBOX is null + and BRIEFF.BFAC is not null + and BRIEFF.BFD19 is not null + " + + SELECT_LOC_63_APPEALS = " + select BFKEY, BFD19, BFDLOCIN, BFCORLID, BFDLOOUT, BFMPRO, BFCORKEY, BFCURLOC, BFAC, BFHINES, TINUM, TITRNUM, AOD, + BFMEMID, BFDPDCN + #{FROM_LOC_63_APPEALS} + " + # rubocop:disable Metrics/MethodLength + SELECT_APPEALS_IN_LOCATION_63_FROM_PAST_2_DAYS = " + select APPEALS.BFKEY, APPEALS.TINUM, APPEALS.BFD19, APPEALS.BFMEMID, APPEALS.BFCURLOC, + APPEALS.BFDLOCIN, APPEALS.BFCORLID, APPEALS.BFDLOOUT, + case when APPEALS.BFAC = '7' or APPEALS.AOD = 1 then 1 else 0 end AOD, + case when APPEALS.BFAC = '7' then 1 else 0 end CAVC, + APPEALS.VLJ, APPEALS.PREV_DECIDING_JUDGE, APPEALS.HEARING_DATE, APPEALS.PREV_BFDDEC, + CORRES.SNAMEF, CORRES.SNAMEL, CORRES.SSN, + STAFF.SNAMEF as VLJ_NAMEF, STAFF.SNAMEL as VLJ_NAMEL + from ( + select BRIEFF.BFKEY, BRIEFF.TINUM, BFD19, BFDLOOUT, BFAC, BFCORKEY, BFMEMID, BFCURLOC, + BRIEFF.BFDLOCIN, BFCORLID, AOD, + case when BFHINES is null or BFHINES <> 'GP' then VLJ_HEARINGS.VLJ end VLJ + , PREV_APPEAL.PREV_DECIDING_JUDGE PREV_DECIDING_JUDGE + , VLJ_HEARINGS.HEARING_DATE HEARING_DATE + , PREV_APPEAL.PREV_BFDDEC PREV_BFDDEC + from ( + #{SELECT_LOC_63_APPEALS} + ) BRIEFF + #{JOIN_ASSOCIATED_VLJS_BY_HEARINGS} + 
#{JOIN_PREVIOUS_APPEALS} + where BRIEFF.BFDLOCIN >= TRUNC(CURRENT_DATE) - 2 + order by BFD19 + ) APPEALS + left join CORRES on APPEALS.BFCORKEY = CORRES.STAFKEY + left join STAFF on APPEALS.VLJ = STAFF.SATTYID + " + def self.counts_by_priority_and_readiness query = <<-SQL select count(*) N, PRIORITY, READY @@ -486,7 +550,51 @@ def self.priority_ready_appeal_vacols_ids def self.ready_to_distribute_appeals query = <<-SQL + #{SELECT_READY_TO_DISTRIBUTE_APPEALS_ORDER_BY_BFD19_ADDITIONAL_COLS} + SQL + + fmtd_query = sanitize_sql_array([query]) + connection.exec_query(fmtd_query).to_a + end + + def self.loc_63_appeals + query = <<-SQL + #{SELECT_APPEALS_IN_LOCATION_63_FROM_PAST_2_DAYS} + SQL + + fmtd_query = sanitize_sql_array([query]) + connection.exec_query(fmtd_query).to_a + end + + def self.appeals_tied_to_non_ssc_avljs + query = <<-SQL + with non_ssc_avljs as ( + #{VACOLS::Staff::NON_SSC_AVLJS} + ) + #{SELECT_READY_TO_DISTRIBUTE_APPEALS_ORDER_BY_BFD19} + where APPEALS.VLJ in (select * from non_ssc_avljs) + and ( + APPEALS.PREV_DECIDING_JUDGE is null or + ( + APPEALS.PREV_DECIDING_JUDGE = APPEALS.VLJ + AND APPEALS.HEARING_DATE <= APPEALS.PREV_BFDDEC + ) + ) + order by BFD19 + SQL + + fmtd_query = sanitize_sql_array([query]) + connection.exec_query(fmtd_query).to_a + end + + def self.appeals_tied_to_avljs_and_vljs + query = <<-SQL + with all_avljs_andvljs as ( + #{VACOLS::Staff::ALL_AVLJS_AND_VLJS} + ) #{SELECT_READY_TO_DISTRIBUTE_APPEALS_ORDER_BY_BFD19} + where APPEALS.VLJ in (select * from all_avljs_andvljs) + order by BFD19 SQL fmtd_query = sanitize_sql_array([query]) @@ -727,7 +835,7 @@ def self.cavc_affinity_filter(appeals, judge_sattyid, cavc_affinity_lever_value, reject_due_to_affinity?(appeal, cavc_affinity_lever_value) elsif cavc_affinity_lever_value == Constants.ACD_LEVERS.infinite - next if ineligible_judges_sattyids&.include?(appeal["vlj"]) + next if hearing_judge_ineligible_with_no_hearings_after_decision(appeal) appeal["prev_deciding_judge"] != judge_sattyid elsif cavc_affinity_lever_value == Constants.ACD_LEVERS.omit @@ -759,7 +867,7 @@ def self.cavc_aod_affinity_filter(appeals, judge_sattyid, cavc_aod_affinity_leve reject_due_to_affinity?(appeal, cavc_aod_affinity_lever_value) elsif cavc_aod_affinity_lever_value == Constants.ACD_LEVERS.infinite - next if ineligible_judges_sattyids&.include?(appeal["vlj"]) + next if hearing_judge_ineligible_with_no_hearings_after_decision(appeal) appeal["prev_deciding_judge"] != judge_sattyid elsif cavc_aod_affinity_lever_value == Constants.ACD_LEVERS.omit @@ -813,6 +921,10 @@ def self.reject_due_to_affinity?(appeal, lever) .affinity_start_date > lever.to_i.days.ago) end + def self.hearing_judge_ineligible_with_no_hearings_after_decision(appeal) + ineligible_judges_sattyids&.include?(appeal["vlj"]) && !appeal_has_hearing_after_previous_decision?(appeal) + end + def self.ineligible_judges_sattyids Rails.cache.fetch("case_distribution_ineligible_judges")&.pluck(:sattyid)&.reject(&:blank?) 
|| [] end diff --git a/app/models/vacols/staff.rb b/app/models/vacols/staff.rb index 3a7bc6100de..a0f09fcb72e 100644 --- a/app/models/vacols/staff.rb +++ b/app/models/vacols/staff.rb @@ -14,6 +14,21 @@ class VACOLS::Staff < VACOLS::Record scope :judge, -> { pure_judge.or(acting_judge) } scope :attorney, -> { pure_attorney.or(acting_judge) } + NON_SSC_AVLJS = " + select sattyid + from staff + where sattyid <> smemgrp + and svlj = 'A' + and sactive = 'A' + " + + ALL_AVLJS_AND_VLJS = " + select sattyid + from staff + where svlj in ('A', 'J') + and sactive in ('A', 'I') + " + def self.find_by_css_id(css_id) find_by(sdomainid: css_id) end diff --git a/app/queries/appeals_in_location_63_in_past_2_days.rb b/app/queries/appeals_in_location_63_in_past_2_days.rb new file mode 100644 index 00000000000..a038f01d67d --- /dev/null +++ b/app/queries/appeals_in_location_63_in_past_2_days.rb @@ -0,0 +1,104 @@ +# frozen_string_literal: true + +class AppealsInLocation63InPast2Days + HEADERS = { + docket_number: "Docket Number", + aod: "AOD", + cavc: "CAVC", + receipt_date: "Receipt Date", + ready_for_distribution_at: "Ready for Distribution at", + veteran_file_number: "Veteran File number", + veteran_name: "Veteran", + hearing_judge_id: "Most Recent Hearing Judge ID", + hearing_judge_name: "Most Recent Hearing Judge Name", + deciding_judge_id: "Most Recent Deciding Judge ID", + deciding_judge_name: "Most Recent Deciding Judge Name", + affinity_start_date: "Affinity Start Date", + moved_date_time: "Date/Time Moved", + bfcurloc: "BFCURLOC" + }.freeze + + def self.generate_rows(record) + HEADERS.keys.map { |key| record[key] } + end + + def self.process + # Convert results to CSV format + + CSV.generate(headers: true) do |csv| + # Add headers to CSV + csv << HEADERS.values + + # Iterate through results and add each row to CSV + loc_63_appeals.each do |record| + csv << generate_rows(record) + end + end + end + + def self.loc_63_appeals + docket_coordinator = DocketCoordinator.new + + docket_coordinator.dockets + .flat_map do |sym, docket| + if sym == :legacy + appeals = docket.loc_63_appeals + legacy_rows(appeals).uniq { |record| record[:docket_number] } + else + [] + end + end + end + + def self.legacy_rows(appeals) + unsorted_result = appeals.map do |appeal| + calculated_values = calculate_field_values(appeal) + { + docket_number: appeal["tinum"], + aod: appeal["aod"] == 1, + cavc: appeal["cavc"] == 1, + receipt_date: appeal["bfd19"], + ready_for_distribution_at: appeal["bfdloout"], + veteran_file_number: appeal["ssn"] || appeal["bfcorlid"], + veteran_name: calculated_values[:veteran_name], + hearing_judge_id: calculated_values[:hearing_judge_id], + hearing_judge_name: calculated_values[:hearing_judge_name], + deciding_judge_id: calculated_values[:deciding_judge_id], + deciding_judge_name: calculated_values[:deciding_judge_name], + affinity_start_date: calculated_values[:appeal_affinity]&.affinity_start_date, + moved_date_time: appeal["bfdlocin"], + bfcurloc: appeal["bfcurloc"] + } + end + + unsorted_result.sort_by { |appeal| appeal[:moved_date_time] }.reverse + end + + def self.calculate_field_values(appeal) + vlj_name = FullName.new(appeal["vlj_namef"], nil, appeal["vlj_namel"]).to_s + { + veteran_name: FullName.new(appeal["snamef"], nil, appeal["snamel"]).to_s, + hearing_judge_id: appeal["vlj"].blank? ? nil : legacy_hearing_judge(appeal), + hearing_judge_name: vlj_name.empty? ? nil : vlj_name, + deciding_judge_id: appeal["prev_deciding_judge"].blank? ? 
nil : legacy_original_deciding_judge(appeal), + deciding_judge_name: appeal["prev_deciding_judge"].blank? ? nil : legacy_original_deciding_judge_name(appeal), + appeal_affinity: AppealAffinity.find_by(case_id: appeal["bfkey"], case_type: "VACOLS::Case") + } + end + + def self.legacy_hearing_judge(appeal) + staff = VACOLS::Staff.find_by(sattyid: appeal["vlj"]) + staff&.sdomainid || appeal["vlj"] + end + + def self.legacy_original_deciding_judge(appeal) + staff = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) + staff&.sdomainid || appeal["prev_deciding_judge"] + end + + def self.legacy_original_deciding_judge_name(appeal) + staff = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) + deciding_judge_name = staff.nil? ? "" : FullName.new(staff["snamef"], nil, staff["snamel"]).to_s + deciding_judge_name.empty? ? nil : deciding_judge_name + end +end diff --git a/app/queries/appeals_ready_for_distribution.rb b/app/queries/appeals_ready_for_distribution.rb index 93934c75c8a..4a757857ef6 100644 --- a/app/queries/appeals_ready_for_distribution.rb +++ b/app/queries/appeals_ready_for_distribution.rb @@ -12,7 +12,8 @@ class AppealsReadyForDistribution target_distro_date: "Target Distro Date", days_before_goal_date: "Days Before Goal Date", hearing_judge: "Hearing Judge", - original_judge: "Original Deciding Judge", + original_judge_id: "Original Deciding Judge ID", + original_judge_name: "Original Deciding Judge", veteran_file_number: "Veteran File number", veteran_name: "Veteran", affinity_start_date: "Affinity Start Date" @@ -54,26 +55,37 @@ def self.ready_appeals def self.legacy_rows(appeals, sym) appeals.map do |appeal| - appeal_affinity = AppealAffinity.find_by(case_id: appeal["bfkey"], case_type: "VACOLS::Case") - - { - docket_number: appeal["tinum"], - docket: sym.to_s, - aod: appeal["aod"] == 1, - cavc: appeal["cavc"] == 1, - receipt_date: appeal["bfd19"], - ready_for_distribution_at: appeal["bfdloout"], - target_distro_date: "N/A", - days_before_goal_date: "N/A", - hearing_judge: FullName.new(appeal["vlj_namef"], nil, appeal["vlj_namel"]).to_s, - original_judge: appeal["prev_deciding_judge"].nil? ? 
nil : legacy_original_deciding_judge(appeal), - veteran_file_number: appeal["ssn"] || appeal["bfcorlid"], - veteran_name: FullName.new(appeal["snamef"], nil, appeal["snamel"]).to_s, - affinity_start_date: appeal_affinity&.affinity_start_date - } + build_legacy_appeal_row(appeal, sym) end end + def self.build_legacy_appeal_row(appeal, sym) + hearing_judge = FullName.new(appeal["vlj_namef"], nil, appeal["vlj_namel"]).to_s + veteran_name = FullName.new(appeal["snamef"], nil, appeal["snamel"]).to_s + + { + docket_number: appeal["tinum"], + docket: sym.to_s, + aod: appeal["aod"] == 1, + cavc: appeal["cavc"] == 1, + receipt_date: appeal["bfd19"], + ready_for_distribution_at: appeal["bfdloout"], + target_distro_date: "N/A", + days_before_goal_date: "N/A", + hearing_judge: hearing_judge, + original_judge_id: legacy_original_deciding_judge(appeal), + original_judge_name: legacy_original_deciding_judge_name(appeal), + veteran_file_number: appeal["ssn"] || appeal["bfcorlid"], + veteran_name: veteran_name, + affinity_start_date: fetch_affinity_start_date(appeal["bfkey"]) + } + end + + def self.fetch_affinity_start_date(case_id) + appeal_affinity = AppealAffinity.find_by(case_id: case_id, case_type: "VACOLS::Case") + appeal_affinity&.affinity_start_date + end + def self.ama_rows(appeals, docket, sym) appeals.map do |appeal| # This comes from the DistributionTask's assigned_at date @@ -81,6 +93,7 @@ def self.ama_rows(appeals, docket, sym) # only look for hearings that were held hearing_judge = with_held_hearings(appeal) priority_appeal = appeal.aod || appeal.cavc + { docket_number: appeal.docket_number, docket: sym.to_s, @@ -140,4 +153,9 @@ def self.legacy_original_deciding_judge(appeal) staff = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) staff&.sdomainid || appeal["prev_deciding_judge"] end + + def self.legacy_original_deciding_judge_name(appeal) + staff = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) + FullName.new(staff["snamef"], nil, staff["snamel"]).to_s if !staff.nil? 
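  # Made-up illustration of the two helpers above: given a VACOLS STAFF row with
  # sdomainid "BVAJDOE", snamef "Jane", snamel "Doe",
  #   legacy_original_deciding_judge(appeal)      # => "BVAJDOE"
  #   legacy_original_deciding_judge_name(appeal) # => "Jane Doe"
  # With no matching STAFF row, the first falls back to the raw prev_deciding_judge
  # value and the second returns nil.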
+ end end diff --git a/app/queries/appeals_tied_to_avljs_and_vljs_query.rb b/app/queries/appeals_tied_to_avljs_and_vljs_query.rb new file mode 100644 index 00000000000..12c8c1abb98 --- /dev/null +++ b/app/queries/appeals_tied_to_avljs_and_vljs_query.rb @@ -0,0 +1,165 @@ +# frozen_string_literal: true + +class AppealsTiedToAvljsAndVljsQuery + # define CSV headers and use this to pull fields to maintain order + + HEADERS = { + docket_number: "Docket number", + docket: "Docket type", + priority: "Priority", + receipt_date: "Receipt Date", + veteran_file_number: "File Number", + veteran_name: "Veteran Name", + vlj: "VLJ Name", + hearing_judge: "Most-recent hearing judge", + most_recent_signing_judge: "Most-recent judge who signed decision name (May be blank if no decision was signed)", + bfcurloc: "Current Location" + }.freeze + + def self.generate_rows(record) + HEADERS.keys.map { |key| record[key] } + end + + def self.process + # Convert results to CSV format + + CSV.generate(headers: true) do |csv| + # Add headers to CSV + csv << HEADERS.values + + # Iterate through results and add each row to CSV + tied_appeals.each do |record| + csv << generate_rows(record) + end + end + end + + # Uses DocketCoordinator to pull appeals ready for distribution + # DocketCoordinator is used by Automatic Case Distribution so this will give us the most accurate list of appeals + def self.tied_appeals + docket_coordinator = DocketCoordinator.new + + docket_coordinator.dockets + .flat_map do |sym, docket| + if sym == :legacy + appeals = docket.appeals_tied_to_avljs_and_vljs + unique_appeals = legacy_rows(appeals, sym).uniq { |record| record[:docket_number] } + + unique_appeals + else + appeals = docket.tied_to_vljs(vlj_user_ids) + + ama_rows(appeals, sym) + end + end + end + + def self.legacy_rows(appeals, sym) + appeals.map do |appeal| + calculated_values = calculate_field_values(appeal) + { + docket_number: appeal["tinum"], + docket: sym.to_s, + priority: appeal["priority"] == 1 ? 
"True" : "", + receipt_date: appeal["bfd19"], + veteran_file_number: calculated_values[:veteran_file_number], + veteran_name: calculated_values[:veteran_name], + vlj: calculated_values[:vlj], + hearing_judge: calculated_values[:hearing_judge], + most_recent_signing_judge: calculated_values[:most_recent_signing_judge], + bfcurloc: calculated_values[:bfcurloc] + } + end + end + + def self.ama_rows(appeals, sym) + appeals.map do |appeal| + # # This comes from the DistributionTask's assigned_at date + # ready_for_distribution_at = distribution_task_query(appeal) + # only look for hearings that were held + hearing_judge = ama_hearing_judge(appeal) + signing_judge = ama_cavc_original_deciding_judge(appeal) + { + docket_number: appeal.docket_number, + docket: sym.to_s, + priority: appeal.aod || appeal.cavc, + receipt_date: appeal.receipt_date, + veteran_file_number: appeal.veteran_file_number, + veteran_name: appeal.veteran&.name.to_s, + vlj: hearing_judge, + hearing_judge: hearing_judge, + most_recent_signing_judge: signing_judge, + bfcurloc: nil + } + end + end + + def self.vlj_user_ids + staff_domainids = VACOLS::Staff.where("svlj in ('A','J') AND sactive in ('A','I') ") + .pluck(:sdomainid) + .uniq + .compact + + User.where(css_id: staff_domainids).pluck(:id) + end + + def self.calculate_field_values(appeal) + vlj_name = get_vlj_name(appeal) + prev_judge_name = get_prev_judge_name(appeal) + vacols_case = VACOLS::Case.find_by(bfkey: appeal["bfkey"]) + veteran_record = VACOLS::Correspondent.find_by(stafkey: vacols_case.bfcorkey) + { + veteran_file_number: veteran_record.ssn || vacols_case&.bfcorlid, + veteran_name: get_name_from_record(veteran_record), + vlj: vlj_name, + hearing_judge: vlj_name, + most_recent_signing_judge: prev_judge_name, + bfcurloc: vacols_case&.bfcurloc + } + end + + def self.get_vlj_name(appeal) + if appeal["vlj"].nil? + vlj_name = nil + else + vlj_record = VACOLS::Staff.find_by(sattyid: appeal["vlj"]) + vlj_name = get_name_from_record(vlj_record) + end + + vlj_name + end + + def self.get_prev_judge_name(appeal) + if appeal["prev_deciding_judge"].nil? + prev_judge_name = nil + else + prev_judge_record = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) + prev_judge_name = get_name_from_record(prev_judge_record) + end + + prev_judge_name + end + + def self.get_name_from_record(record) + FullName.new(record["snamef"], nil, record["snamel"]).to_s + end + + def self.ama_hearing_judge(appeal) + appeal.hearings + .filter { |hearing| hearing.disposition = Constants.HEARING_DISPOSITION_TYPES.held } + .first&.judge&.full_name + end + + def self.ama_cavc_original_deciding_judge(appeal) + return nil if appeal.cavc_remand.nil? 
+ + source_appeal_id = CavcRemand.find_by(remand_appeal: appeal).source_appeal_id + judge_css_id = Task.find_by( + appeal_id: source_appeal_id, + appeal_type: Appeal.name, + type: JudgeDecisionReviewTask.name + )&.assigned_to&.css_id + + User.find_by_css_id(judge_css_id)&.full_name + end +end diff --git a/app/queries/appeals_tied_to_non_ssc_avlj_query.rb b/app/queries/appeals_tied_to_non_ssc_avlj_query.rb new file mode 100644 index 00000000000..62aff71e308 --- /dev/null +++ b/app/queries/appeals_tied_to_non_ssc_avlj_query.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +class AppealsTiedToNonSscAvljQuery + # define CSV headers and use this to pull fields to maintain order + + HEADERS = { + docket_number: "Docket number", + docket: "Docket type", + priority: "Priority", + receipt_date: "Receipt Date", + veteran_file_number: "File Number", + veteran_name: "Veteran Name", + non_ssc_avlj: "Non-SSC AVLJ's Name", + hearing_judge: "Most-recent hearing judge", + most_recent_signing_judge: "Most-recent judge who signed decision", + bfcurloc: "BFCURLOC" + }.freeze + + def self.generate_rows(record) + HEADERS.keys.map { |key| record[key] } + end + + def self.process + # Convert results to CSV format + + CSV.generate(headers: true) do |csv| + # Add headers to CSV + csv << HEADERS.values + + # Iterate through results and add each row to CSV + tied_appeals.each do |record| + csv << generate_rows(record) + end + end + end + + # Uses DocketCoordinator to pull appeals ready for distribution + # DocketCoordinator is used by Automatic Case Distribution so this will give us the most accurate list of appeals + def self.tied_appeals + docket_coordinator = DocketCoordinator.new + + docket_coordinator.dockets + .flat_map do |sym, docket| + if sym == :legacy + appeals = docket.appeals_tied_to_non_ssc_avljs + unique_appeals = legacy_rows(appeals, sym).uniq { |record| record[:docket_number] } + + unique_appeals + else + [] + end + end + end + + def self.legacy_rows(appeals, sym) + appeals.map do |appeal| + calculated_values = calculate_field_values(appeal) + { + docket_number: appeal["tinum"], + docket: sym.to_s, + priority: appeal["priority"] == 1 ? "True" : "", + receipt_date: appeal["bfd19"], + veteran_file_number: calculated_values[:veteran_file_number], + veteran_name: calculated_values[:veteran_name], + non_ssc_avlj: calculated_values[:non_ssc_avlj], + hearing_judge: calculated_values[:hearing_judge], + most_recent_signing_judge: calculated_values[:most_recent_signing_judge], + bfcurloc: calculated_values[:bfcurloc] + } + end + end + + def self.calculate_field_values(appeal) + avlj_name = get_avlj_name(appeal) + prev_judge_name = get_prev_judge_name(appeal) + vacols_case = VACOLS::Case.find_by(bfkey: appeal["bfkey"]) + veteran_record = VACOLS::Correspondent.find_by(stafkey: vacols_case.bfcorkey) + { + veteran_file_number: veteran_record.ssn || vacols_case&.bfcorlid, + veteran_name: get_name_from_record(veteran_record), + non_ssc_avlj: avlj_name, + hearing_judge: avlj_name, + most_recent_signing_judge: prev_judge_name, + bfcurloc: vacols_case&.bfcurloc + } + end + + def self.get_avlj_name(appeal) + if appeal["vlj"].nil? + avlj_name = nil + else + avlj_record = VACOLS::Staff.find_by(sattyid: appeal["vlj"]) + avlj_name = get_name_from_record(avlj_record) + end + + avlj_name + end + + def self.get_prev_judge_name(appeal) + if appeal["prev_deciding_judge"].nil? 
+ prev_judge_name = nil + else + prev_judge_record = VACOLS::Staff.find_by(sattyid: appeal["prev_deciding_judge"]) + prev_judge_name = get_name_from_record(prev_judge_record) + end + + prev_judge_name + end + + def self.get_name_from_record(record) + FullName.new(record["snamef"], nil, record["snamel"]).to_s + end +end diff --git a/app/repositories/appeal_repository.rb b/app/repositories/appeal_repository.rb index ba473615590..a5385e95883 100644 --- a/app/repositories/appeal_repository.rb +++ b/app/repositories/appeal_repository.rb @@ -848,6 +848,7 @@ def distribute_nonpriority_appeals(judge, genpop, range, limit, bust_backlog) end end + # currently this is used for reporting needs def ready_to_distribute_appeals MetricsService.record("VACOLS: ready_to_distribute_appeals", name: "ready_to_distribute_appeals", @@ -856,6 +857,30 @@ def ready_to_distribute_appeals end end + def appeals_tied_to_non_ssc_avljs + MetricsService.record("VACOLS: appeals_tied_to_non_ssc_avljs", + name: "appeals_tied_to_non_ssc_avljs", + service: :vacols) do + VACOLS::CaseDocket.appeals_tied_to_non_ssc_avljs + end + end + + def appeals_tied_to_avljs_and_vljs + MetricsService.record("VACOLS: appeals_tied_to_avljs_and_vljs", + name: "appeals_tied_to_avljs_and_vljs", + service: :vacols) do + VACOLS::CaseDocket.appeals_tied_to_avljs_and_vljs + end + end + + def loc_63_appeals + MetricsService.record("VACOLS: loc_63_appeals", + name: "loc_63_appeals", + service: :vacols) do + VACOLS::CaseDocket.loc_63_appeals + end + end + private # NOTE: this should be called within a transaction where you are closing an appeal diff --git a/app/services/collectors/stats_collector.rb b/app/services/collectors/stats_collector.rb index 5eebb3a2ad5..f3148196c5c 100644 --- a/app/services/collectors/stats_collector.rb +++ b/app/services/collectors/stats_collector.rb @@ -11,7 +11,7 @@ def flatten_stats(metric_name_prefix, stats_hash) stats_hash.each do |metric_name, counts_hash| unless valid_metric_name?(metric_name) fail "Invalid metric name #{metric_name}; "\ - "see https://docs.datadoghq.com/developers/metrics/#naming-custom-metrics" + "see https://docs.dynatrace.com/docs/extend-dynatrace/extend-metrics/reference/custom-metric-metadata" end stats.concat add_tags_to_group_counts(metric_name_prefix, metric_name, counts_hash) @@ -29,7 +29,7 @@ def add_tags_to_group_counts(prefix, metric_name, group_counts) end end - # See valid tag name rules at https://docs.datadoghq.com/tagging/#defining-tags + # See valid tag name rules at https://docs.dynatrace.com/docs/manage/tags-and-metadata/setup/how-to-define-tags def to_valid_tag(name) name.gsub(/[^a-zA-Z_\-\:\.\d\/]/, "__") end @@ -41,7 +41,7 @@ def to_valid_tag_key(name) end def valid_metric_name?(metric_name) - # Actual limit is 200 but since the actual metric name in DataDog has + # Actual limit is 200 but since the actual metric name has # "dsva_appeals.stats_collector_job." prepended, let's just stick with a 150 character limit. 
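    # Illustration with a made-up metric: a collector metric named "appeals.ready" is
    # emitted as "dsva_appeals.stats_collector_job.appeals.ready", so the 150-character
    # cap below leaves headroom for the prepended prefix. Tag values are sanitized in
    # the same spirit, e.g.
    #   to_valid_tag("docket type")  # => "docket__type"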
return false if metric_name.length > 150 diff --git a/app/services/geomatch_service.rb b/app/services/geomatch_service.rb index 0d3e53a13e4..6d8645a5e1a 100644 --- a/app/services/geomatch_service.rb +++ b/app/services/geomatch_service.rb @@ -73,7 +73,6 @@ def record_geomatched_appeal(status) metric_name: "geomatched_appeals", attrs: { status: status, - appeal_external_id: appeal.external_id, hearing_request_type: appeal.current_hearing_request_type } ) diff --git a/app/services/hearings/reminder_service.rb b/app/services/hearings/reminder_service.rb index 936f60aff4c..4076b053f65 100644 --- a/app/services/hearings/reminder_service.rb +++ b/app/services/hearings/reminder_service.rb @@ -51,7 +51,6 @@ def send_to_metrics_service(type) metric_name: "emails.would_be_sent", attrs: { reminder_type: type, - hearing_id: hearing.id, request_type: hearing.hearing_request_type, hearing_type: hearing.class.name } diff --git a/app/services/metrics_service.rb b/app/services/metrics_service.rb index 08c2dbb68c4..82fff42f6d9 100644 --- a/app/services/metrics_service.rb +++ b/app/services/metrics_service.rb @@ -1,18 +1,14 @@ # frozen_string_literal: true require "benchmark" -require "datadog/statsd" require "statsd-instrument" # see https://dropwizard.github.io/metrics/3.1.0/getting-started/ for abstractions on metric types class MetricsService - @statsd = Datadog::Statsd.new - # :reek:LongParameterList - def self.increment_counter(metric_group:, metric_name:, app_name:, attrs: {}, by: 1) + def self.increment_counter(metric_group:, metric_name:, app_name:, attrs: {}) tags = get_tags(app_name, attrs) stat_name = get_stat_name(metric_group, metric_name) - @statsd.increment(stat_name, tags: tags, by: by) # Dynatrace statD implementation StatsD.increment(stat_name, tags: tags) @@ -34,7 +30,6 @@ def self.record_runtime(metric_group:, app_name:, start_time: Time.zone.now) def self.emit_gauge(metric_group:, metric_name:, metric_value:, app_name:, attrs: {}) tags = get_tags(app_name, attrs) stat_name = get_stat_name(metric_group, metric_name) - @statsd.gauge(stat_name, metric_value, tags: tags) # Dynatrace statD implementation StatsD.gauge(stat_name, metric_value, tags: tags) @@ -45,7 +40,6 @@ def self.emit_gauge(metric_group:, metric_name:, metric_value:, app_name:, attrs def self.histogram(metric_group:, metric_name:, metric_value:, app_name:, attrs: {}) tags = get_tags(app_name, attrs) stat_name = get_stat_name(metric_group, metric_name) - @statsd.histogram(stat_name, metric_value, tags: tags) # Dynatrace statD implementation StatsD.histogram(stat_name, metric_value, tags: tags) @@ -98,7 +92,6 @@ def self.record(description, service: nil, name: "unknown", caller: nil) } MetricsService.emit_gauge(sent_to_info) - sent_to << Metric::LOG_SYSTEMS[:datadog] sent_to << Metric::LOG_SYSTEMS[:dynatrace] end diff --git a/app/services/sqs_service.rb b/app/services/sqs_service.rb new file mode 100644 index 00000000000..9f56544df56 --- /dev/null +++ b/app/services/sqs_service.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +# A service class to aid in interacting with Caseflow's SQS queues. +class SqsService + class << self + # Intializes an SQS client, or returns a cached version if one has already been initialized. + # + # @return [Aws::SQS::Client] + # An SQS Client + def sqs_client + @sqs_client ||= initialize_sqs_client + end + + # Locates the URL for a SQS queue based on a provided substring. + # + # @param name [String] A substring of the queue's name being searched for. 
+ # @param check_fifo [Boolean] Whether or not the queue being searched for should be for a FIFO queue. + # + # @return [String] The full URL of the SQS queue whose name contains the substring provided. + def find_queue_url_by_name(name:, check_fifo: false) + url = sqs_client.list_queues.queue_urls.find { _1.include?(name) && _1.include?(ENV["DEPLOY_ENV"]) } + + fail Caseflow::Error::SqsQueueNotFoundError, "The #{name} SQS queue is missing in this environment." unless url + + # Optional validation check + if check_fifo && !url.include?(".fifo") + fail Caseflow::Error::SqsUnexpectedQueueTypeError, "No FIFO queue with name #{name} could be located." + end + + url + end + + # Removes the messages provided from a specified queue. + # + # @param queue_url [String] The URL of the SQS queue that the messages will be deleted from. + # @param messages [Array] Messages to be deleted. + def batch_delete_messages(queue_url:, messages:) + messages.in_groups_of(10, false).flat_map do |msg_batch| + sqs_client.delete_message_batch({ + queue_url: queue_url, + entries: process_entries_for_batch_delete(msg_batch) + }) + end + end + + private + + # Intializes an SQS client. Takes into account SQS endpoint overrides and applies them + # to the instantiated client object. + # + # @return [Aws::SQS::Client] + # An SQS Client + def initialize_sqs_client + sqs_client = Aws::SQS::Client.new + + # Allow for overriding the endpoint requests are sent to via the Rails config. + if Rails.application.config.sqs_endpoint + sqs_client.config[:endpoint] = URI(Rails.application.config.sqs_endpoint) + end + + sqs_client + end + + # Prepares a batch of messages to be in the format needed for the SQS SDK's delete_message_batch method. + # + # @param unprocessed_entries [Array] Messages to be deleted. + # + # @return [Array] An array where each entry is a hash that contains a unique (per batch) + # id and a message's receipt handle. 
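    #
    # For illustration (receipt handles are made up):
    #   process_entries_for_batch_delete([msg_a, msg_b])
    #   # => [{ id: "message_0", receipt_handle: "AQEB..." },
    #   #     { id: "message_1", receipt_handle: "AQEC..." }]
    #
    # A hypothetical end-to-end sketch of the public methods above (the queue name is
    # illustrative only, not a real Caseflow queue):
    #   queue_url = SqsService.find_queue_url_by_name(name: "example_notifications")
    #   messages  = SqsService.sqs_client.receive_message(
    #     queue_url: queue_url, max_number_of_messages: 10
    #   ).messages
    #   SqsService.batch_delete_messages(queue_url: queue_url, messages: messages)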
+ def process_entries_for_batch_delete(unprocessed_entries) + unprocessed_entries.map.with_index do |msg, index| + { + id: "message_#{index}", + receipt_handle: msg.receipt_handle + } + end + end + end +end diff --git a/app/views/case_distribution_levers/index.html.erb b/app/views/case_distribution_levers/index.html.erb index f973dec10bb..c9ba0e386f4 100644 --- a/app/views/case_distribution_levers/index.html.erb +++ b/app/views/case_distribution_levers/index.html.erb @@ -8,7 +8,8 @@ acd_levers: @acd_levers, acd_history: @acd_history, user_is_an_acd_admin: @user_is_an_acd_admin, - acd_exclude_from_affinity: @acd_exclude_from_affinity + acd_exclude_from_affinity: @acd_exclude_from_affinity, + returnedAppealJobs: @returned_appeal_jobs }) %> <% end %> diff --git a/app/views/case_distribution_levers/test.html.erb b/app/views/case_distribution_levers/test.html.erb index 6f0fe70e96e..f6be3c5eb98 100644 --- a/app/views/case_distribution_levers/test.html.erb +++ b/app/views/case_distribution_levers/test.html.erb @@ -5,6 +5,7 @@ applicationUrls: application_urls, feedbackUrl: feedback_url, acdLevers: @acd_levers, - acdHistory: @acd_history + acdHistory: @acd_history, + returnedAppealJobs: @returned_appeal_jobs }) %> <% end %> diff --git a/app/views/layouts/_head.html.erb b/app/views/layouts/_head.html.erb index d502c1fb968..279790378b6 100644 --- a/app/views/layouts/_head.html.erb +++ b/app/views/layouts/_head.html.erb @@ -12,7 +12,6 @@ <%= render "layouts/head_sentry" %> diff --git a/app/views/layouts/_head_new_relic.html.erb b/app/views/layouts/_head_new_relic.html.erb deleted file mode 100644 index 067774b1547..00000000000 --- a/app/views/layouts/_head_new_relic.html.erb +++ /dev/null @@ -1,13 +0,0 @@ - - // To avoid sending PII to New Relic, we will disable its error logging functionality. - // In anecdotal testing, the error logging functionality didn't even seem to work - // particularly well. - // - // We wrap this in a conditional because newrelic will not be defined when browser - // monitoring is not enabled. This will occur in local development when the - // NEW_RELIC_AGENT_ENABLED env var is not set, for instance. - if (window.newrelic) { - window.newrelic.setErrorHandler(function() { - return true; - }); - } diff --git a/client/app/caseDistribution/components/CollapsibleTable.jsx b/client/app/caseDistribution/components/CollapsibleTable.jsx new file mode 100644 index 00000000000..81bd0f64741 --- /dev/null +++ b/client/app/caseDistribution/components/CollapsibleTable.jsx @@ -0,0 +1,68 @@ +import React, { useEffect, useState } from 'react'; +import PropTypes from 'prop-types'; + +const CollapsibleTable = (props) => { + const { returnedAppealJobs } = props; + const [expandedRows, setExpandedRows] = useState([]); + const [allExpanded, setAllExpanded] = useState(true); + + useEffect(() => { + const allRowIds = returnedAppealJobs.map((row) => row.id); + + setExpandedRows(allRowIds); + }, [returnedAppealJobs]); + + const toggleAllRows = () => { + + if (allExpanded) { + // Collapse all rows + setExpandedRows([]); + } else { + // Expand all rows + const allRowIds = returnedAppealJobs.map((row) => row.id); + + setExpandedRows(allRowIds); + } + setAllExpanded(!allExpanded); + }; + + const renderRowDetails = (row) => { + return ( + + {row.created_at} + {row.returned_appeals.join(', ')} + {JSON.parse(row.stats).message} + + ); + }; + + return ( +
+ + + + + + + + + + + {returnedAppealJobs.map((row) => ( + + {expandedRows.includes(row.id) && renderRowDetails(row)} + + ))} + +
Created At | Returned Appeals | Stats
+
+ ); +}; + +CollapsibleTable.propTypes = { + returnedAppealJobs: PropTypes.array, +}; + +export default CollapsibleTable; diff --git a/client/app/caseDistribution/test.jsx b/client/app/caseDistribution/test.jsx index 68b83c16f55..b570a804cdb 100644 --- a/client/app/caseDistribution/test.jsx +++ b/client/app/caseDistribution/test.jsx @@ -1,3 +1,4 @@ +/* eslint-disable max-lines */ /* eslint-disable react/prop-types */ import React from 'react'; @@ -11,6 +12,8 @@ import Footer from '@department-of-veterans-affairs/caseflow-frontend-toolkit/co import CaseSearchLink from '../components/CaseSearchLink'; import ApiUtil from '../util/ApiUtil'; import Button from '../components/Button'; +import Alert from 'app/components/Alert'; +import CollapsibleTable from './components/CollapsibleTable'; class CaseDistributionTest extends React.PureComponent { constructor(props) { @@ -19,20 +22,37 @@ class CaseDistributionTest extends React.PureComponent { isReseedingAod: false, isReseedingNonAod: false, isReseedingAmaDocketGoals: false, - isReseedingDocketPriority: false + isReseedingDocketPriority: false, + isReturnLegacyAppeals: false, + isFailReturnLegacyAppeals: false, + showLegacyAppealsAlert: false, + showAlert: false, + alertType: 'success', }; } + componentDidUpdate() { + // Delay of 5 seconds + setTimeout(() => { + this.setState({ showAlert: false, showLegacyAppealsAlert: false }); + }, 5000); + } + reseedAod = () => { this.setState({ isReseedingAod: true }); ApiUtil.post('/case_distribution_levers_tests/run_demo_aod_hearing_seeds').then(() => { this.setState({ isReseedingAod: false, + showAlert: true, + alertMsg: 'Successfully Completed Seeding Aod Hearing Held Appeals.', }); }, (err) => { console.warn(err); this.setState({ isReseedingAod: false, + showAlert: true, + alertMsg: err, + alertType: 'error', }); }); }; @@ -42,11 +62,16 @@ class CaseDistributionTest extends React.PureComponent { ApiUtil.post('/case_distribution_levers_tests/run_demo_non_aod_hearing_seeds').then(() => { this.setState({ isReseedingNonAod: false, + showAlert: true, + alertMsg: 'Successfully Completed Seeding Non Aod Hearing Held Appeals.', }); }, (err) => { console.warn(err); this.setState({ isReseedingNonAod: false, + showAlert: true, + alertMsg: err, + alertType: 'error', }); }); }; @@ -56,25 +81,74 @@ class CaseDistributionTest extends React.PureComponent { ApiUtil.post('/case_distribution_levers_tests/run-demo-ama-docket-goals').then(() => { this.setState({ isReseedingAmaDocketGoals: false, + showAlert: true, + alertMsg: 'Successfully Completed Seeding Ama Docket Time Goal Non Priority Appeals.', }); }, (err) => { console.warn(err); this.setState({ isReseedingAmaDocketGoals: false, + showAlert: true, + alertMsg: err, + alertType: 'error', }); }); }; reseedDocketPriority = () => { this.setState({ isReseedingDocketPriority: true }); - ApiUtil.post('/case_distribution_levers_tests/run-demo-docket-priority').then(() => { + ApiUtil.post('/case_distribution_levers_tests/run_demo_docket_priority').then(() => { this.setState({ isReseedingDocketPriority: false, + showAlert: true, + alertMsg: 'Successfully Completed Seeding Docket Type Appeals.', }); }, (err) => { console.warn(err); this.setState({ isReseedingDocketPriority: false, + showAlert: true, + alertMsg: err, + alertType: 'error', + }); + }); + }; + + reseedNonSSCAVLJAppeals = () => { + this.setState({ isReseedingNonSSCAVLJAppeals: true }); + ApiUtil.post('/case_distribution_levers_tests/run_demo_non_avlj_appeals').then(() => { + this.setState({ + 
isReseedingNonSSCAVLJAppeals: false, + showAlert: true, + alertMsg: 'Successfully Completed Seeding non-SSC AVLJ and Appeals.', + }); + }, (err) => { + console.warn(err); + this.setState({ + isReseedingNonSSCAVLJAppeals: false, + showAlert: true, + alertMsg: err, + alertType: 'error', + }); + }); + }; + + returnLegacyAppealsToBoard = () => { + this.setState({ isReturnLegacyAppeals: true }); + ApiUtil.post('/case_distribution_levers_tests/run_return_legacy_appeals_to_board').then(() => { + this.setState({ + isReturnLegacyAppeals: false, + showLegacyAppealsAlert: true, + legacyAppealsAlertType: 'success', + legacyAppealsAlertMsg: 'Successfully Completed Return Legacy Appeals To Board Job.', + }); + }, (err) => { + console.warn(err); + this.setState({ + isReturnLegacyAppeals: false, + showLegacyAppealsAlert: true, + legacyAppealsAlertType: 'error', + legacyAppealsAlertMsg: err }); }); }; @@ -137,6 +211,16 @@ class CaseDistributionTest extends React.PureComponent { +
  • + +
  • +
  • + + + +
  • Case Distribution Levers

    +
  • + + + +
  • +
  • + + + +
  • +
  • + + + +

  • Run Seed Files

    + { this.state.showAlert && + {this.state.alertMsg} + }
    • +
    • +
    • +
    +
    +

    Case Movement

    + { this.state.showLegacyAppealsAlert && + + {this.state.legacyAppealsAlertMsg} + + } +
      +
    • +

    +

    Log of 15 most recent appeals moved to location 63

    + +
    + ); }} /> diff --git a/client/app/hearings/components/dailyDocket/DailyDocketPrinted.jsx b/client/app/hearings/components/dailyDocket/DailyDocketPrinted.jsx index 1d794ddd4e1..bf0de26b02a 100644 --- a/client/app/hearings/components/dailyDocket/DailyDocketPrinted.jsx +++ b/client/app/hearings/components/dailyDocket/DailyDocketPrinted.jsx @@ -5,7 +5,7 @@ import _ from 'lodash'; import moment from 'moment'; import { getDate, getDisplayTime } from '../../../util/DateUtil'; -import { isPreviouslyScheduledHearing, sortHearings, dispositionLabel } from '../../utils'; +import { isPreviouslyScheduledHearing, sortHearings, dispositionLabel, timeWithTimeZone } from '../../utils'; import { openPrintDialogue } from '../../../util/PrintUtil'; import AOD_CODE_TO_LABEL_MAP from '../../../../constants/AOD_CODE_TO_LABEL_MAP'; import Table from '../../../components/Table'; @@ -27,9 +27,13 @@ export class DailyDocketPrinted extends React.Component { { header: 'Time', valueFunction: (hearing) => { + if (hearing.scheduledInTimezone) { + return timeWithTimeZone(hearing.scheduledFor, hearing.scheduledInTimezone); + } + const localTimezone = hearing.regionalOfficeTimezone || 'America/New_York'; - return getDisplayTime(hearing.scheduledTimeString, localTimezone); + return getDisplayTime(this.props.docket.scheduledFor, hearing.scheduledTimeString, localTimezone); } }, { diff --git a/client/app/hearings/utils.js b/client/app/hearings/utils.js index ef69f920b3d..b4a38677e5a 100644 --- a/client/app/hearings/utils.js +++ b/client/app/hearings/utils.js @@ -663,8 +663,12 @@ const calculateAvailableTimeslots = ({ }) => { // Extract the hearing time, add the hearing_day date from beginsAt, set the timezone be the ro timezone const hearingTimes = scheduledHearings.map((hearing) => { - const [hearingHour, hearingMinute] = hearing.hearingTime.split(':'); - const hearingTimeMoment = beginsAt.clone().set({ hour: hearingHour, minute: hearingMinute }); + const hearingClockTime = splitSelectedTime(hearing.hearingTime)[0]; + const parsedClockTime = moment(hearingClockTime, 'h:mm A'); + + const hearingTimeMoment = beginsAt.clone().set({ + hour: parsedClockTime.get('Hour'), minute: parsedClockTime.get('Minute') + }); // Change which zone the time is in but don't convert, "08:15 EDT" -> "08:15 PDT" return hearingTimeMoment.tz(roTimezone, true); @@ -725,11 +729,11 @@ const combineSlotsAndHearings = ({ roTimezone, availableSlots, scheduledHearings key: `${slot?.slotId}-${slot?.time_string}`, full: false, // This is a moment object, always in "America/New_York" - hearingTime: slot.time.format('HH:mm') + hearingTime: slot.time.format('HH:mm A') })); const formattedHearings = scheduledHearings.map((hearing) => { - const time = moment.tz(`${hearing?.hearingTime} ${hearingDayDate}`, 'HH:mm YYYY-MM-DD', roTimezone).clone(). + const time = moment.tz(`${hearing?.hearingTime} ${hearingDayDate}`, 'HH:mm A YYYY-MM-DD', roTimezone).clone(). tz('America/New_York'); return { @@ -740,7 +744,7 @@ const combineSlotsAndHearings = ({ roTimezone, availableSlots, scheduledHearings time, // The hearingTime is in roTimezone, but it looks like "09:30", this takes that "09:30" // in roTimezone, and converts it to Eastern zone because slots are always in eastern. 
- hearingTime: time.format('HH:mm') + hearingTime: time.format('HH:mm A') }; }); diff --git a/client/app/queue/components/NotificationTableColumns.jsx b/client/app/queue/components/NotificationTableColumns.jsx index 592e8c775af..e23aa966832 100644 --- a/client/app/queue/components/NotificationTableColumns.jsx +++ b/client/app/queue/components/NotificationTableColumns.jsx @@ -81,7 +81,7 @@ export const recipientInformationColumn = (notifications) => { tableData: notifications, valueName: 'Recipient Information', // eslint-disable-next-line no-negated-condition - valueFunction: (notification) => notification.status !== 'delivered' ? '—' : notification.recipient_information + valueFunction: (notification) => notification.recipient_information ?? '—' }; }; diff --git a/client/app/styles/caseDistribution/_test_seeds.scss b/client/app/styles/caseDistribution/_test_seeds.scss index a3f5292a83f..257a9e240f8 100644 --- a/client/app/styles/caseDistribution/_test_seeds.scss +++ b/client/app/styles/caseDistribution/_test_seeds.scss @@ -2,6 +2,7 @@ $seed-table-border-color: #d6d7d9; $seed-button-background-color: #0071bc; $seed-button-font-color: #fff; $seed-table-preview-bg-color: #f1f1f1; +$case-movement-button-bg-color: #07648d; .test-seeds-num-field { // width: auto; @@ -122,3 +123,11 @@ $seed-table-preview-bg-color: #f1f1f1; justify-content: flex-end; flex-direction: row; } + +.usa-button-case-movement { + background: $case-movement-button-bg-color; +} + +.usa-button-case-movement:hover { + background: $case-movement-button-bg-color; +} diff --git a/client/app/util/DateUtil.js b/client/app/util/DateUtil.js index 3cd690bbb34..24a5e7d8851 100644 --- a/client/app/util/DateUtil.js +++ b/client/app/util/DateUtil.js @@ -145,11 +145,11 @@ export const getDate = (date) => { return moment(date).format('YYYY-MM-DD'); }; -export const getDisplayTime = (scheduledTimeString, timezone) => { - const val = scheduledTimeString ? moment(scheduledTimeString, 'HH:mm').format('h:mm a') : ''; +export const getDisplayTime = (dateString, scheduledTimeString, timezone) => { + const val = scheduledTimeString ? moment(scheduledTimeString, 'HH:mm a').format('h:mm A') : ''; if (timezone) { - const tz = moment().tz(timezone). + const tz = moment(dateString).tz(timezone). 
format('z'); return `${val} ${tz}`; diff --git a/client/constants/ACD_LEVERS.json b/client/constants/ACD_LEVERS.json index 92eb0942637..67b22361cde 100644 --- a/client/constants/ACD_LEVERS.json +++ b/client/constants/ACD_LEVERS.json @@ -25,7 +25,8 @@ "affinity": "affinity", "docket_distribution_prior": "docket_distribution_prior", "docket_time_goal": "docket_time_goal", - "docket_levers": "docket_levers" + "docket_levers": "docket_levers", + "internal": "internal" }, "validation_error_message": { "minimum_not_met": "Please enter a value greater than or equal to 0", diff --git a/client/constants/DISTRIBUTION.json b/client/constants/DISTRIBUTION.json index d4b1809cd9c..f4bcaea1542 100644 --- a/client/constants/DISTRIBUTION.json +++ b/client/constants/DISTRIBUTION.json @@ -54,5 +54,10 @@ "disable_ama_priority_direct_review": "disable_ama_priority_direct_review", "disable_ama_priority_direct_review_title": "ACD Disable AMA Priority Direct Review", "disable_ama_priority_evidence_submission": "disable_ama_priority_evidence_submission", - "disable_ama_priority_evidence_submission_title": "ACD Disable AMA Priority Evidence Submission" + "disable_ama_priority_evidence_submission_title": "ACD Disable AMA Priority Evidence Submission", + "enable_nonsscavlj": "enable_nonsscavlj", + "enable_nonsscavlj_title": "Enable Non-SSC/AVLJ", + "no_records_moved_message": "Job Ran Successfully, No Records Moved", + "nonsscavlj_number_of_appeals_to_move": "nonsscavlj_number_of_appeals_to_move", + "nonsscavlj_number_of_appeals_to_move_title": "Non-SSC/AVLJ Number of Appeals to Move" } diff --git a/client/constants/QUARTERLY_STATUSES.json b/client/constants/QUARTERLY_STATUSES.json index e6df95484e1..c6891b266e8 100644 --- a/client/constants/QUARTERLY_STATUSES.json +++ b/client/constants/QUARTERLY_STATUSES.json @@ -4,7 +4,7 @@ "hearing_scheduled": "Hearing Scheduled", "privacy_pending": "Privacy Act Pending", "ihp_pending": "VSO IHP Pending", - "hearing_to_be_rescheduled": "Hearing to be Rescheduled", - "hearing_to_be_rescheduled_privacy_pending": "Hearing to be Rescheduled / Privacy Act Pending", + "hearing_to_be_rescheduled": "docketed", + "hearing_to_be_rescheduled_privacy_pending": "Privacy Act Pending", "appeal_docketed": "docketed" } diff --git a/client/constants/VA_NOTIFY_CONSTANTS.json b/client/constants/VA_NOTIFY_CONSTANTS.json new file mode 100644 index 00000000000..5c30dead852 --- /dev/null +++ b/client/constants/VA_NOTIFY_CONSTANTS.json @@ -0,0 +1,3 @@ +{ + "message_group_id": "VANotifyStatusUpdate" +} diff --git a/client/test/app/hearings/components/dailyDocket/DailyDocketPrinted.test.js b/client/test/app/hearings/components/dailyDocket/DailyDocketPrinted.test.js index 86eddf54b1e..01d4d85c8c8 100644 --- a/client/test/app/hearings/components/dailyDocket/DailyDocketPrinted.test.js +++ b/client/test/app/hearings/components/dailyDocket/DailyDocketPrinted.test.js @@ -48,4 +48,103 @@ describe('DailyDocketPrinted', () => { renderDailyDocketPrinted(mockProps); expect(await screen.queryByText(/Note:\s*This\s*is\s*a\s*note/)).not.toBeInTheDocument(); }); + + it('displays post meridiem time for DST time with scheduledInTimezone null', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '06-17-2024' }, + hearings: [ + { + scheduledTimeString: '3:30 PM Eastern Time (US & Canada)', + scheduledInTimezone: null, + regionalOfficeTimezone: 'America/New_York' + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM EDT')).toBeInTheDocument(); + 
}); + + it('displays post meridiem time for DST time with scheduledInTimezone provided', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '06-17-2024' }, + hearings: [ + { + scheduledFor: '2024-06-17T15:30:00.000-04:00', + scheduledInTimezone: 'America/New_York', + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM EDT')).toBeInTheDocument(); + }); + + it('displays post meridiem time in winter with scheduledInTimezone null', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '12-17-2024' }, + hearings: [ + { + scheduledTimeString: '3:30 PM Eastern Time (US & Canada)', + scheduledInTimezone: null, + regionalOfficeTimezone: 'America/New_York' + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM EST')).toBeInTheDocument(); + }); + + it('displays post meridiem time in winter with scheduledInTimezone provided', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '12-17-2024' }, + hearings: [ + { + scheduledFor: '2024-12-17T15:30:00.000-05:00', + scheduledInTimezone: 'America/New_York', + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM EST')).toBeInTheDocument(); + }); + + it('displays post meridiem time in summer with scheduledInTimezone null, ro timezone does not observe DST', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '06-17-2024' }, + hearings: [ + { + scheduledTimeString: '3:30 PM Hawaii', + scheduledInTimezone: null, + regionalOfficeTimezone: 'Pacific/Honolulu' + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM HST')).toBeInTheDocument(); + }); + + it('displays post meridiem time in summer with scheduledInTimezone provided, timezone does not observe DST', () => { + const mockProps = { + user: { userIsNonBoardEmployee: false }, + docket: { scheduledFor: '06-17-2024' }, + hearings: [ + { + scheduledFor: '2024-06-17T15:30:00.000-10:00', + scheduledInTimezone: 'Pacific/Honolulu', + } + ] + }; + + renderDailyDocketPrinted(mockProps); + expect(screen.getByText('3:30 PM HST')).toBeInTheDocument(); + }); }); diff --git a/client/test/app/hearings/components/scheduleHearing/TimeSlotButton.test.js b/client/test/app/hearings/components/scheduleHearing/TimeSlotButton.test.js index 6d8022fb9b0..9b7b75902d3 100644 --- a/client/test/app/hearings/components/scheduleHearing/TimeSlotButton.test.js +++ b/client/test/app/hearings/components/scheduleHearing/TimeSlotButton.test.js @@ -4,7 +4,7 @@ import { TimeSlotButton } from 'app/hearings/components/scheduleHearing/TimeSlot import { render } from '@testing-library/react'; import { roTimezones, formatTimeSlotLabel } from 'app/hearings/utils'; -const time = '08:15'; +const time = '15:15 PM'; const hearingDayDate = '2025-01-01'; const issueCount = 2; const poaName = 'Something'; diff --git a/client/test/app/hearings/components/scheduleHearing/__snapshots__/TimeSlotButton.test.js.snap b/client/test/app/hearings/components/scheduleHearing/__snapshots__/TimeSlotButton.test.js.snap index 9c8f1cd3ac4..fcc008289f6 100644 --- a/client/test/app/hearings/components/scheduleHearing/__snapshots__/TimeSlotButton.test.js.snap +++ b/client/test/app/hearings/components/scheduleHearing/__snapshots__/TimeSlotButton.test.js.snap @@ -14,7 +14,7 @@ Object {
    - 8:15 AM EST + 3:15 PM EST
    @@ -32,7 +32,7 @@ Object {
    - 8:15 AM EST + 3:15 PM EST
    @@ -108,7 +108,7 @@ Object {
    - 8:15 AM EST + 3:15 PM EST
    - 8:15 AM EST + 3:15 PM EST
    - 8:15 AM EST + 3:15 PM EST
    - 8:15 AM EST + 3:15 PM EST
    "true", + "FifoThroughputLimit" => "perQueue" + } + } +].freeze + +if Rails.application.config.sqs_create_queues + sqs_client = Aws::SQS::Client.new + sqs_client.config[:endpoint] = URI(Rails.application.config.sqs_endpoint) + + MESSAGE_QUEUES.each do |queue_info| + sqs_client.create_queue({ + queue_name: "#{QUEUE_PREFIX}#{queue_info[:name]}".to_sym, + attributes: queue_info[:attributes] + }) + end +end diff --git a/config/initializers/open_telemetry.rb b/config/initializers/open_telemetry.rb new file mode 100644 index 00000000000..1eaf079c9e9 --- /dev/null +++ b/config/initializers/open_telemetry.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true +require 'rubygems' +require 'bundler/setup' + +require 'opentelemetry/sdk' +require 'opentelemetry/exporter/otlp' + +require "opentelemetry-instrumentation-action_pack" +require "opentelemetry-instrumentation-action_view" +require "opentelemetry-instrumentation-active_job" +require "opentelemetry-instrumentation-active_record" +require "opentelemetry-instrumentation-active_support" +require "opentelemetry-instrumentation-aws_sdk" +require "opentelemetry-instrumentation-concurrent_ruby" +require "opentelemetry-instrumentation-faraday" +require "opentelemetry-instrumentation-http_client" +require "opentelemetry-instrumentation-net_http" +require "opentelemetry-instrumentation-pg" +require "opentelemetry-instrumentation-rack" +require "opentelemetry-instrumentation-rails" +require "opentelemetry-instrumentation-rake" +require "opentelemetry-instrumentation-redis" + +# rubocop:disable Layout/LineLength + +DT_API_URL = ENV["DT_API_URL"] +DT_API_TOKEN = ENV["DT_API_TOKEN"] + +Rails.logger.info("DT_API_TOKEN is set to #{DT_API_TOKEN}") + +if !Rails.env.development? && !Rails.env.test? && !Rails.env.demo? + OpenTelemetry::SDK.configure do |c| + c.service_name = 'caseflow' + c.service_version = '1.0.1' + + c.use 'OpenTelemetry::Instrumentation::ActiveRecord' + c.use 'OpenTelemetry::Instrumentation::Rack', { untraced_endpoints: ['/health-check', '/sample', '/logs'] } + c.use 'OpenTelemetry::Instrumentation::Rails' + + # c.use 'OpenTelemetry::Instrumentation::PG' + # c.use 'OpenTelemetry::Instrumentation::ActionView' + # c.use 'OpenTelemetry::Instrumentation::Redis' + + c.use 'OpenTelemetry::Instrumentation::ActionPack' + c.use 'OpenTelemetry::Instrumentation::ActiveSupport' + c.use 'OpenTelemetry::Instrumentation::ActiveJob' + c.use 'OpenTelemetry::Instrumentation::AwsSdk', { suppress_internal_instrumentation: true } + c.use 'OpenTelemetry::Instrumentation::ConcurrentRuby' + c.use 'OpenTelemetry::Instrumentation::Faraday' + c.use 'OpenTelemetry::Instrumentation::HttpClient' + c.use 'OpenTelemetry::Instrumentation::Net::HTTP' + + Rails.logger.info("Loaded instruments") + + %w[dt_metadata_e617c525669e072eebe3d0f08212e8f2.properties /var/lib/dynatrace/enrichment/dt_host_metadata.properties].each { |name| + begin + c.resource = OpenTelemetry::SDK::Resources::Resource.create(Hash[*File.read(name.start_with?("/var") ? 
name : File.read(name)).split(/[=\n]+/)]) + rescue + end + } + c.add_span_processor( + OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new( + OpenTelemetry::Exporter::OTLP::Exporter.new( + endpoint: DT_API_URL + "/v1/traces", + headers: { + "Authorization": "Api-Token " + DT_API_TOKEN + } + ) + ) + ) + end +end + # rubocop:enable Layout/LineLength diff --git a/config/initializers/rack_context.rb b/config/initializers/rack_context.rb new file mode 100644 index 00000000000..974951fd6ed --- /dev/null +++ b/config/initializers/rack_context.rb @@ -0,0 +1,24 @@ +class RackContextGetter < OpenTelemetry::Context::Propagation::RackEnvGetter + + # :reek:FeatureEnvy + def get(carrier, key) + carrier[to_rack_key(key)] || carrier[key] + end + + protected + + def to_rack_key(key) + ret = +"HTTP_#{key}" + ret.tr!('-', '_') + ret.upcase! + ret + end +end + +RACK_ENV_GETTER = RackContextGetter.new + +OpenTelemetry::Common::Propagation.instance_eval do + def rack_env_getter + RACK_ENV_GETTER + end +end diff --git a/config/initializers/scheduled_jobs.rb b/config/initializers/scheduled_jobs.rb index 1f706fa375a..73293371f1d 100644 --- a/config/initializers/scheduled_jobs.rb +++ b/config/initializers/scheduled_jobs.rb @@ -39,7 +39,6 @@ "update_appellant_representation_job" => UpdateAppellantRepresentationJob, "update_cached_appeals_attributes_job" => UpdateCachedAppealsAttributesJob, "warm_bgs_caches_job" => WarmBgsCachesJob, - "va_notify_status_update_job" => VANotifyStatusUpdateJob, "poll_docketed_legacy_appeals_job" => PollDocketedLegacyAppealsJob, "retrieve_and_cache_reader_documents_job" => RetrieveAndCacheReaderDocumentsJob, "travel_board_hearing_sync_job" => Hearings::TravelBoardHearingSyncJob, diff --git a/config/initializers/shoryuken.rb b/config/initializers/shoryuken.rb index 84400df8a42..bc2377beab6 100644 --- a/config/initializers/shoryuken.rb +++ b/config/initializers/shoryuken.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "#{Rails.root}/app/jobs/middleware/job_monitoring_middleware" require "#{Rails.root}/app/jobs/middleware/job_request_store_middleware" require "#{Rails.root}/app/jobs/middleware/job_sentry_scope_middleware" @@ -9,16 +11,23 @@ .shoryuken_options(retry_intervals: [3.seconds, 30.seconds, 5.minutes, 30.minutes, 2.hours, 5.hours]) if Rails.application.config.sqs_endpoint - # override the sqs_endpoint Shoryuken::Client.sqs.config[:endpoint] = URI(Rails.application.config.sqs_endpoint) end if Rails.application.config.sqs_create_queues # create the development queues - Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + '_low_priority' }) - Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + '_high_priority' }) - Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + '_send_notifications' }) - Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + '_receive_notifications' }) + Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + "_low_priority" }) + Shoryuken::Client.sqs.create_queue({ queue_name: ActiveJob::Base.queue_name_prefix + "_high_priority" }) + Shoryuken::Client.sqs.create_queue({ + queue_name: ( + ActiveJob::Base.queue_name_prefix + "_send_notifications.fifo" + ).to_sym, + attributes: { + "FifoQueue" => "true", + "FifoThroughputLimit" => "perQueue", + "ContentBasedDeduplication" => "false" + } + }) end Shoryuken.configure_server do |config| diff --git a/config/initializers/va_notify.rb 
b/config/initializers/va_notify.rb index de650eac2fc..80d05092136 100644 --- a/config/initializers/va_notify.rb +++ b/config/initializers/va_notify.rb @@ -1 +1,6 @@ -VANotifyService = (ApplicationController.dependencies_faked? ? Fakes::VANotifyService : ExternalApi::VANotifyService) \ No newline at end of file +case Rails.deploy_env +when :uat, :prod + VANotifyService = ExternalApi::VANotifyService +else + VANotifyService = Fakes::VANotifyService +end diff --git a/config/initializers/vacols_request_spy.rb b/config/initializers/vacols_request_spy.rb index 939f77dac4b..d79b92df7d6 100644 --- a/config/initializers/vacols_request_spy.rb +++ b/config/initializers/vacols_request_spy.rb @@ -32,8 +32,6 @@ def simulate_vacols_latency # $> REACT_ON_RAILS_ENV=HOT SIMULATE_VACOLS_LATENCY=true bundle exec rails s -p 3000 return unless ENV["SIMULATE_VACOLS_LATENCY"] - # Default determined from metrics sent to Datadog: - # https://app.datadoghq.com/dashboard/54w-efy-r5d/va-systems?fullscreen_widget=399796003 latency = ENV["VACOLS_DELAY_MS"] || 80 sleep(latency / 1000.0) end diff --git a/config/routes.rb b/config/routes.rb index ef67aeffb8e..3b14fac4202 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -30,11 +30,16 @@ get 'appeals_ready_to_distribute' get 'appeals_non_priority_ready_to_distribute' get 'appeals_distributed' + get 'appeals_in_location_63_in_past_2_days' get 'ineligible_judge_list' + get 'appeals_tied_to_non_ssc_avlj' + get 'appeals_tied_to_avljs_and_vljs' post 'run_demo_aod_hearing_seeds' post 'run_demo_non_aod_hearing_seeds' post 'run-demo-ama-docket-goals' - post 'run-demo-docket-priority' + post 'run_demo_non_avlj_appeals' + post 'run_demo_docket_priority' + post 'run_return_legacy_appeals_to_board' end end diff --git a/db/migrate/20240717034659_create_returned_appeal_jobs.rb b/db/migrate/20240717034659_create_returned_appeal_jobs.rb new file mode 100644 index 00000000000..35ac2499b65 --- /dev/null +++ b/db/migrate/20240717034659_create_returned_appeal_jobs.rb @@ -0,0 +1,13 @@ +class CreateReturnedAppealJobs < ActiveRecord::Migration[6.0] + def change + create_table :returned_appeal_jobs do |t| + t.timestamp :started_at + t.timestamp :completed_at + t.timestamp :errored_at + t.json :stats + t.text :returned_appeals, array: true, default: [] + + t.timestamps + end + end +end diff --git a/db/migrate/20240717145856_add_sm_sand_email_status_to_notifications.rb b/db/migrate/20240717145856_add_sm_sand_email_status_to_notifications.rb new file mode 100644 index 00000000000..ee83c718534 --- /dev/null +++ b/db/migrate/20240717145856_add_sm_sand_email_status_to_notifications.rb @@ -0,0 +1,6 @@ +class AddSmSandEmailStatusToNotifications < ActiveRecord::Migration[6.0] + def change + add_column :notifications, :sms_status_reason, :string, comment: "Context around why this VA Notify notification is in the sms status" + add_column :notifications, :email_status_reason, :string, comment: "Context around why this VA Notify notification is in the email status" + end +end diff --git a/db/schema.rb b/db/schema.rb index 3f0a705c442..3afc92fdb92 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -1450,6 +1450,7 @@ t.string "email_notification_content", comment: "Full Email Text Content of Notification" t.string "email_notification_external_id", comment: "VA Notify Notification Id for the email notification send through their API " t.string "email_notification_status", comment: "Status of the Email Notification" + t.string "email_status_reason", comment: "Context around why this VA Notify notification is in 
the email status" t.date "event_date", null: false, comment: "Date of Event" t.string "event_type", null: false, comment: "Type of Event" t.bigint "notifiable_id" @@ -1465,6 +1466,7 @@ t.string "sms_notification_status", comment: "Status of SMS/Text Notification" t.string "sms_response_content", comment: "Message body of the sms notification response." t.datetime "sms_response_time", comment: "Date and Time of the sms notification response." + t.string "sms_status_reason", comment: "Context around why this VA Notify notification is in the sms status" t.datetime "updated_at", comment: "TImestamp of when Notification was Updated" t.index ["appeals_id", "appeals_type"], name: "index_appeals_notifications_on_appeals_id_and_appeals_type" t.index ["email_notification_external_id"], name: "index_notifications_on_email_notification_external_id" @@ -1738,6 +1740,16 @@ t.index ["user_id"], name: "index_request_issues_updates_on_user_id" end + create_table "returned_appeal_jobs", force: :cascade do |t| + t.datetime "completed_at" + t.datetime "created_at", null: false + t.datetime "errored_at" + t.text "returned_appeals", default: [], array: true + t.datetime "started_at" + t.json "stats" + t.datetime "updated_at", null: false + end + create_table "schedule_periods", force: :cascade do |t| t.datetime "created_at", null: false t.date "end_date", null: false diff --git a/db/seeds/api_keys.rb b/db/seeds/api_keys.rb index 129c5fc8c86..0f27499b462 100644 --- a/db/seeds/api_keys.rb +++ b/db/seeds/api_keys.rb @@ -1,7 +1,5 @@ # frozen_string_literal: true -# create ApiKey seeds - module Seeds require "./app/models/api_key.rb" @@ -13,9 +11,10 @@ def seed! private def create_api_keys - ApiKey.create(consumer_name: "appeals_consumer", key_digest: "z1VxSVb2iae07+bYq8ZjQZs3ll4ZgSeVIUC9O5u+HfA=", - key_string: "5ecb5d7b440e429bb5fac331419c7e1a") + ApiKey.create!(consumer_name: "TestApiKey", key_string: "test") + ApiKey.create(consumer_name: "appeals_consumer", + key_digest: "z1VxSVb2iae07+bYq8ZjQZs3ll4ZgSeVIUC9O5u+HfA=", + key_string: "5ecb5d7b440e429bb5fac331419c7e1a") end end end - diff --git a/db/seeds/case_distribution_levers.rb b/db/seeds/case_distribution_levers.rb index 043ec9b2a8c..c83ba6d1f88 100644 --- a/db/seeds/case_distribution_levers.rb +++ b/db/seeds/case_distribution_levers.rb @@ -784,6 +784,30 @@ def levers } ] }, + { + item: Constants.DISTRIBUTION.enable_nonsscavlj, + title: Constants.DISTRIBUTION.enable_nonsscavlj_title, + description: "This is the internal lever used to enable and disable Non-SSC AVLJ work.", + data_type: Constants.ACD_LEVERS.data_types.boolean, + value: true, + unit: "", + is_disabled_in_ui: true, + algorithms_used: [], + lever_group: Constants.ACD_LEVERS.lever_groups.internal, + lever_group_order: 0 + }, + { + item: Constants.DISTRIBUTION.nonsscavlj_number_of_appeals_to_move, + title: Constants.DISTRIBUTION.nonsscavlj_number_of_appeals_to_move_title, + description: "This is the internal lever used to alter the number of appeals to be returned for Non-SSC AVLJs", + data_type: Constants.ACD_LEVERS.data_types.number, + value: 2, + unit: "", + is_disabled_in_ui: true, + algorithms_used: [], + lever_group: Constants.ACD_LEVERS.lever_groups.internal, + lever_group_order: 999 + }, ] end @@ -824,7 +848,6 @@ def full_update(item) # DANGER DANGER DANGER DANGER DANGER DANGER DANGER DANGER DANGER DANGER DANGER def full_update_lever(lever) existing_lever = CaseDistributionLever.find_by_item(lever[:item]) - existing_lever.update( title: lever[:title], description: lever[:description], diff 
--git a/db/seeds/non_ssc_avlj_legacy_appeals.rb b/db/seeds/non_ssc_avlj_legacy_appeals.rb new file mode 100644 index 00000000000..49286f69406 --- /dev/null +++ b/db/seeds/non_ssc_avlj_legacy_appeals.rb @@ -0,0 +1,472 @@ +# frozen_string_literal: true + +module Seeds + class NonSscAvljLegacyAppeals < Base + def initialize + # initialize_np_legacy_appeals_file_number_and_participant_id + # initialize_priority_legacy_appeals_file_number_and_participant_id + end + + def seed! + RequestStore[:current_user] = User.system_user + create_avljs + create_legacy_appeals + end + + private + + def create_avljs + create_non_ssc_avlj("NONSSCAN01", "Four Priority") + create_non_ssc_avlj("NONSSCAN02", "Four non-priority") + create_non_ssc_avlj("NONSSCAN03", "Four-pri h-and-d") + create_non_ssc_avlj("NONSSCAN04", "Four-non-pri h-and-d") + create_non_ssc_avlj("NONSSCAN05", "For-mix-of both-h-only") + create_non_ssc_avlj("NONSSCAN06", "For-mix-of both-h-and-d") + create_non_ssc_avlj("NONSSCAN07", "Do-not-get moved-pri") + create_non_ssc_avlj("NONSSCAN08", "Do-not-get moved-nonpri") + create_non_ssc_avlj("NONSSCAN09", "Do-not-get moved-mix") + create_non_ssc_avlj("NONSSCAN10", "Some-moved some-not") + create_ssc_avlj("SSCA11", "Does-not qualify for-mvmt") + create_non_ssc_avlj("NONSSCAN12", "Two-judges last-is-SSC") + create_non_ssc_avlj("NONSSCAN13", "Two-judges both-non-SSC") + create_inactive_non_ssc_avlj("NONSSCAN14", "Inactive Non") + create_vlj("REGVLJ01", "Regular VLJ1") + create_vlj("REGVLJ02", "Regular VLJ2") + create_non_ssc_avlj("SIGNAVLJLGC", "NonSSC Signing-AVLJ") + create_non_ssc_avlj("AVLJLGC2", "Alternate NonSSC-AVLJ") + create_ssc_avlj("SSCAVLJLGC", "SSC-Two-judges last-is-SSC") + end + + def create_legacy_appeals + # the naming comes from the acceptance criteria of APPEALS-45208 + create_four_priority_appeals_tied_to_a_non_ssc_avlj + create_four_non_priority_appeals_tied_to_a_non_ssc_avlj + create_four_priority_appeals_tied_to_and_signed_by_a_non_ssc_avlj + create_four_non_priority_appeals_tied_to_and_signed_by_a_non_ssc_avlj + create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj + create_four_alternating_priority_by_age_appeals_tied_to_and_signed_by_a_non_ssc_avlj + create_four_priority_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + create_four_non_priority_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + create_two_sets_of_seven_types_of_appeals_tied_to_a_non_ssc_avlj + create_four_alternating_priority_by_age_appeals_tied_to_a_ssc_avlj + create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_with_a_second_hearing_held_by_a_ssc_avlj + create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_with_a_second_hearing_held_by_another_non_ssc_avlj + create_unsigned_priority_appeal_tied_to_inactive_non_ssc_avlj + create_signed_non_priority_appeal_tied_to_inactive_non_ssc_avlj + create_unsigned_priority_ama_appeal_tied_to_non_ssc_avlj + create_signed_non_priority_ama_appeal_tied_to_non_ssc_avlj + create_signed_priority_appeal_tied_to_vlj + create_unsigned_non_priority_appeal_tied_to_vlj + end + + def create_four_priority_appeals_tied_to_a_non_ssc_avlj + # A non-SSC AVLJ that Only has 4 priority cases where they held the last hearing + avlj = User.find_by(css_id: "NONSSCAN01") + create_legacy_appeal(priority=true, avlj, 300.days.ago) + create_legacy_appeal(priority=true, avlj, 200.days.ago) + create_legacy_appeal(priority=true, avlj, 
100.days.ago) + create_legacy_appeal(priority=true, avlj, 30.days.ago) + end + + def create_four_non_priority_appeals_tied_to_a_non_ssc_avlj + # A non-SSC AVLJ that Only has 4 non-priority cases where they held the last hearing + avlj = User.find_by(css_id: "NONSSCAN02") + create_legacy_appeal(priority=false, avlj, 350.days.ago) + create_legacy_appeal(priority=false, avlj, 250.days.ago) + create_legacy_appeal(priority=false, avlj, 150.days.ago) + create_legacy_appeal(priority=false, avlj, 50.days.ago) + end + + def create_four_priority_appeals_tied_to_and_signed_by_a_non_ssc_avlj + assigned_avlj = User.find_by(css_id: "NONSSCAN03") + signing_avlj = User.find_by(css_id: "NONSSCAN03") + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 100.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 80.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 60.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 30.days.ago) + end + + def create_four_non_priority_appeals_tied_to_and_signed_by_a_non_ssc_avlj + # A non-SSC AVLJ that Only has 4 non-priority cases where they held the last hearing and signed the most recent decision + assigned_avlj = User.find_by(css_id: "NONSSCAN04") + signing_avlj = User.find_by(css_id: "NONSSCAN04") + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 110.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 90.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 70.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 40.days.ago) + end + + def create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj + # A non-SSC AVLJ that Has 4 in alternating order by age of BRIEFF.BFD19 (Docket Date) + # priority cases where they held the last hearing + # non-priority cases where they held the last hearing + avlj = User.find_by(css_id: "NONSSCAN05") + create_legacy_appeal(priority=false, avlj, 600.days.ago) #oldest + create_legacy_appeal(priority=true, avlj, 425.days.ago) + create_legacy_appeal(priority=false, avlj, 400.days.ago) + create_legacy_appeal(priority=true, avlj, 40.days.ago) #most recent + end + + def create_four_alternating_priority_by_age_appeals_tied_to_and_signed_by_a_non_ssc_avlj + # A non-SSC AVLJ that Has 4 in alternating order by age of BRIEFF.BFD19 (Docket Date) + # priority cases where they held the last hearing and signed the most recent decision + # non-priority cases where they held the last hearing and signed the most recent decision + signing_avlj = User.find_by(css_id: "NONSSCAN06") + assigned_avlj = User.find_by(css_id: "NONSSCAN06") + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 120.days.ago) #oldest + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 110.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 100.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 50.days.ago) #most recent + end + + def create_four_priority_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + # A non-SSC AVLJ that Only has 4 priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + assigned_avlj = User.find_by(css_id: "NONSSCAN07") + signing_avlj = User.find_by(css_id: "SIGNAVLJLGC") + create_signed_legacy_appeal(priority=true, 
signing_avlj, assigned_avlj, 120.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 110.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 100.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 50.days.ago) + end + + def create_four_non_priority_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + # A non-SSC AVLJ that Only has 4 non-priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + assigned_avlj = User.find_by(css_id: "NONSSCAN08") + signing_avlj = User.find_by(css_id: "SIGNAVLJLGC") + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 120.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 110.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 100.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 50.days.ago) + end + + def create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_signed_by_another_avlj + # A non-SSC AVLJ that Has 4 in alternating order by age of BRIEFF.BFD19 (Docket Date) + # priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + # non-priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + assigned_avlj = User.find_by(css_id: "NONSSCAN09") + signing_avlj = User.find_by(css_id: "SIGNAVLJLGC") + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 220.days.ago) #oldest + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 210.days.ago) + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 200.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 150.days.ago) #most recent + end + + def create_two_sets_of_seven_types_of_appeals_tied_to_a_non_ssc_avlj + # A non-SSC AVLJ that Has 12 appeals + # Notes + # Cycle through the groups before creating the second appeal in the group, make each created appeal newer by BRIEFF.BFD19 (Docket Date) than the previous one + # Appeals in the same group should not be grouped next to each other + # appeals + # 1. priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + # 2. non-priority cases where they held the last hearing and did NOT sign the most recent decision + # These cases should NOT be returned to the board + # 3. priority cases where they held the last hearing + # 4. non-priority cases where they held the last hearing + # 5. priority cases where they held the last hearing and signed the most recent decision + # 6. non-priority cases where they held the last hearing and signed the most recent decision + # 7. has an appeal with a hearing where they were the judge but the appeal is NOT ready to distribute + # This case would NOT show up in the ready to distribute query, but we could look it up by veteran ID to verify that it didn't get moved. 
+ + assigned_avlj = User.find_by(css_id: "NONSSCAN10") + signing_avlj = User.find_by(css_id: "SIGNAVLJLGC") + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 220.days.ago) #oldest + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 210.days.ago) + create_legacy_appeal(priority=true, assigned_avlj, 200.days.ago) + create_legacy_appeal(priority=false, assigned_avlj, 190.days.ago) + create_signed_legacy_appeal(priority=false, assigned_avlj, assigned_avlj, 180.days.ago) + create_signed_legacy_appeal(priority=true, assigned_avlj, assigned_avlj, 170.days.ago) + legacy_appeal = create_legacy_appeal(priority=true, assigned_avlj, 160.days.ago) + make_legacy_appeal_not_ready_for_distribution(legacy_appeal) + + create_signed_legacy_appeal(priority=false, signing_avlj, assigned_avlj, 150.days.ago) + create_signed_legacy_appeal(priority=true, signing_avlj, assigned_avlj, 140.days.ago) + create_legacy_appeal(priority=true, assigned_avlj, 130.days.ago) + create_legacy_appeal(priority=false, assigned_avlj, 120.days.ago) + create_signed_legacy_appeal(priority=false, assigned_avlj, assigned_avlj, 110.days.ago) + create_signed_legacy_appeal(priority=true, assigned_avlj, assigned_avlj, 100.days.ago) + legacy_appeal = create_legacy_appeal(priority=true, assigned_avlj, 90.days.ago) + make_legacy_appeal_not_ready_for_distribution(legacy_appeal)#most recent + end + + def create_four_alternating_priority_by_age_appeals_tied_to_a_ssc_avlj + # A SSC AVLJ that has 4 appeals for which they held the last hearing. + # These cases should NOT be returned to the board + ssc_avlj = User.find_by(css_id: "SSCA11") + create_legacy_appeal(priority=true, ssc_avlj, 325.days.ago) + create_legacy_appeal(priority=false, ssc_avlj, 275.days.ago) + create_legacy_appeal(priority=true, ssc_avlj, 175.days.ago) + create_legacy_appeal(priority=false, ssc_avlj, 75.days.ago) + end + + def create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_with_a_second_hearing_held_by_a_ssc_avlj + # A non-SSC AVLJ that has 4 appeals where the non-SSC AVLJ held a hearing first, but a second hearing was held by an SSC AVLJ. + # These cases should NOT be returned to the board + avlj = User.find_by(css_id: "NONSSCAN12") + ssc_avlj = User.find_by(css_id: "SSCAVLJLGC") + legacy_appeal = create_legacy_appeal(priority=true, avlj, 90.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 90.days.ago, ssc_avlj) + + legacy_appeal = create_legacy_appeal(priority=false, avlj, 60.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 30.days.ago, ssc_avlj) + + legacy_appeal = create_legacy_appeal(priority=true, avlj, 30.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 15.days.ago, ssc_avlj) + + legacy_appeal = create_legacy_appeal(priority=false, avlj, 15.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 5.days.ago, ssc_avlj) + end + + def create_four_alternating_priority_by_age_appeals_tied_to_a_non_ssc_avlj_with_a_second_hearing_held_by_another_non_ssc_avlj + # A non-SSC AVLJ that has 4 appeals where the non-SSC AVLJ held a hearing first, but a second hearing was held by different non-SSC AVLJ. 
+ avlj = User.find_by(css_id: "NONSSCAN13") + avlj2 = User.find_by(css_id: "AVLJLGC2") + legacy_appeal = create_legacy_appeal(priority=true, avlj, 95.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 65.days.ago, avlj2) + + legacy_appeal = create_legacy_appeal(priority=false, avlj, 65.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 35.days.ago, avlj2) + + legacy_appeal = create_legacy_appeal(priority=true, avlj, 35.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 25.days.ago, avlj2) + + legacy_appeal = create_legacy_appeal(priority=false, avlj, 20.days.ago) + create_second_hearing_for_legacy_appeal(legacy_appeal, 10.days.ago, avlj2) + end + + def create_unsigned_priority_appeal_tied_to_inactive_non_ssc_avlj + inactive_avlj = User.find_by(css_id: "NONSSCAN14") + docket_date = Date.new(1999, 1, 1) + create_legacy_appeal(priority=true, inactive_avlj, docket_date) + end + + def create_signed_non_priority_appeal_tied_to_inactive_non_ssc_avlj + inactive_avlj = User.find_by(css_id: "NONSSCAN14") + docket_date = Date.new(1999, 1, 2) + create_signed_legacy_appeal(priority=false, inactive_avlj, inactive_avlj, docket_date) + end + + def create_unsigned_priority_ama_appeal_tied_to_non_ssc_avlj + non_ssc_avlj = User.find_by(css_id: "NONSSCAN01") + docket_date = Date.new(2020, 1, 3) + create_ama_appeal(priority=true, non_ssc_avlj, docket_date) + end + + def create_signed_non_priority_ama_appeal_tied_to_non_ssc_avlj + non_ssc_avlj = User.find_by(css_id: "NONSSCAN01") + docket_date = Date.new(2020, 1, 4) + create_signed_ama_appeal(priority=false, non_ssc_avlj, non_ssc_avlj, docket_date) + end + + def create_signed_priority_appeal_tied_to_vlj + vlj = User.find_by(css_id: "REGVLJ01") + docket_date = Date.new(1999, 1, 5) + create_signed_legacy_appeal(priority=true, vlj, vlj, docket_date) + end + + def create_unsigned_non_priority_appeal_tied_to_vlj + vlj = User.find_by(css_id: "REGVLJ02") + docket_date = Date.new(1999, 1, 6) + create_legacy_appeal(priority=false, vlj, docket_date) + end + + def create_non_ssc_avlj(css_id, full_name) + User.find_by_css_id(css_id) || + create(:user, :non_ssc_avlj_user, css_id: css_id, full_name: full_name) + end + + def create_ssc_avlj(css_id, full_name) + User.find_by_css_id(css_id) || + create(:user, :ssc_avlj_user, css_id: css_id, full_name: full_name) + end + + def create_inactive_non_ssc_avlj(css_id, full_name) + # same as a regular non_ssc_avlj except their sactive = 'I' instead of 'A' + User.find_by_css_id(css_id) || + create(:user, :inactive_non_ssc_avlj_user, css_id: css_id, full_name: full_name) + end + + def create_vlj(css_id, full_name) + # same as an ssc_avlj except their svlj = 'J' instead of 'A' + User.find_by_css_id(css_id) || + create(:user, :vlj_user, css_id: css_id, full_name: full_name) + end + + def demo_regional_office + 'RO17' + end + + def create_signed_legacy_appeal(priority, signing_avlj, assigned_avlj, docket_date) + Timecop.travel(docket_date) do + traits = priority ? [:type_cavc_remand] : [:type_original] + create(:legacy_signed_appeal, *traits, signing_avlj: signing_avlj, assigned_avlj: assigned_avlj) + end + end + + def create_legacy_appeal(priority, avlj, docket_date) + Timecop.travel(docket_date) + veteran = create_demo_veteran_for_legacy_appeal + + correspondent = create(:correspondent, + snamef: veteran.first_name, snamel: veteran.last_name, + ssalut: "", ssn: veteran.file_number) + + + vacols_case = priority ?
create_priority_video_vacols_case(veteran, + correspondent, + avlj, + docket_date) : + create_non_priority_video_vacols_case(veteran, + correspondent, + avlj, + docket_date) + + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_case, + closest_regional_office: demo_regional_office + ) + + create(:available_hearing_locations, demo_regional_office, appeal: legacy_appeal) + Timecop.return + + legacy_appeal + end + + def create_ama_appeal(priority, avlj, docket_date) + Timecop.travel(docket_date) + priority ? create( + :appeal, + :hearing_docket, + :with_post_intake_tasks, + :advanced_on_docket_due_to_age, + :held_hearing_and_ready_to_distribute, + :tied_to_judge, + veteran: create_demo_veteran_for_legacy_appeal, + receipt_date: docket_date, + tied_judge: avlj, + adding_user: avlj + ) : create( + :appeal, + :hearing_docket, + :with_post_intake_tasks, + :held_hearing_and_ready_to_distribute, + :tied_to_judge, + veteran: create_demo_veteran_for_legacy_appeal, + receipt_date: docket_date, + tied_judge: avlj, + adding_user: avlj + ) + Timecop.return + end + + def create_signed_ama_appeal(priority, avlj, signing_avlj, docket_date) + + # Go back to when we want the original appeal to have been decided + Timecop.travel(docket_date) + + source = create(:appeal, :dispatched, :hearing_docket, associated_judge: avlj) + remand = create(:cavc_remand, source_appeal: source).remand_appeal + remand.tasks.where(type: SendCavcRemandProcessedLetterTask.name).map(&:completed!) + create(:appeal_affinity, appeal: remand) + + jat = JudgeAssignTaskCreator.new(appeal: remand, judge: avlj, assigned_by_id: avlj.id).call + create(:colocated_task, :schedule_hearing, parent: jat, assigned_by: avlj).completed! + + create(:hearing, :held, appeal: remand, judge: avlj, adding_user: User.system_user) + remand.tasks.where(type: AssignHearingDispositionTask.name).flat_map(&:children).map(&:completed!) 
+ remand.appeal_affinity.update!(affinity_start_date: Time.zone.now) + + remand + Timecop.return + end + + + def create_priority_video_vacols_case(veteran, correspondent, associated_judge, days_ago) + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: associated_judge, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: days_ago + ) + end + + def create_non_priority_video_vacols_case(veteran, correspondent, associated_judge, days_ago) + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: associated_judge, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: days_ago + ) + end + + def random_demo_file_number_and_participant_id + random_file_number = Random.rand(100_000_000...989_999_999) + random_participant_id = random_file_number + 100000 + + while find_demo_veteran(random_file_number) + random_file_number += 2000 + random_participant_id += 2000 + end + + return random_file_number, random_participant_id + end + + def find_demo_veteran(file_number) + Veteran.find_by(file_number: format("%09d", n: file_number + 1)) + end + + def create_demo_veteran(options = {}) + params = { + file_number: format("%09d", n: options[:file_number]), + participant_id: format("%09d", n: options[:participant_id]) + } + + Veteran.find_by_participant_id(params[:participant_id]) || create(:veteran, params.merge(options)) + end + + def create_demo_veteran_for_legacy_appeal + file_number, participant_id = random_demo_file_number_and_participant_id + create_demo_veteran( + file_number: file_number, + participant_id: participant_id + ) + end + + def create_second_hearing_for_legacy_appeal(legacy_appeal, docket_date, avlj) + case_hearing = create( + :case_hearing, + :disposition_held, + folder_nr: legacy_appeal.vacols_id, + hearing_date: docket_date.to_date, + user: avlj + ) + + create(:legacy_hearing, appeal: legacy_appeal, case_hearing: case_hearing) + end + + def make_legacy_appeal_not_ready_for_distribution(legacy_appeal) + Rails.logger.info("~~~Marking legacy appeal for Veteran ID: #{legacy_appeal.vbms_id} as Not Ready To Distribute~~~") + VACOLS::Case.find(legacy_appeal.vacols_id).update!(bfcurloc: "01") + end + end +end diff --git a/docker-bin/build.sh b/docker-bin/build.sh index 797ebaff5ab..435d00482d5 100755 --- a/docker-bin/build.sh +++ b/docker-bin/build.sh @@ -41,8 +41,6 @@ fi cd ../../ printf "commit: `git rev-parse HEAD`\ndate: `git log -1 --format=%cd`" > config/build_version.yml -credstash -t appeals-credstash get datadog.api.key > config/datadog.key - cp /etc/ssl/certs/ca-certificates.crt docker-bin/ca-certs/cacert.pem # Build Docker @@ -50,7 +48,6 @@ echo -e "\tCreating Caseflow App Docker Image" docker build -t caseflow . result=$? echo -e "\tCleaning Up..." 
-rm -rf config/datadog.key rm -rf docker-bin/oracle_libs if [ $result == 0 ]; then echo -e "\tBuilding Caseflow Docker App: Completed" diff --git a/docker-bin/startup.sh b/docker-bin/startup.sh index 5689eecc68d..238677951e6 100644 --- a/docker-bin/startup.sh +++ b/docker-bin/startup.sh @@ -8,12 +8,6 @@ source $THIS_SCRIPT_DIR/env.sh echo "Start DBus" dbus-daemon --system -echo "Start Datadog" -nohup /opt/datadog-agent/bin/agent/agent run -p /opt/datadog-agent/run/agent.pid > dd-agent.out & -nohup /opt/datadog-agent/embedded/bin/trace-agent --config /etc/datadog-agent/datadog.yaml --pid /opt/datadog-agent/run/trace-agent.pid > dd-trace.out & -nohup /opt/datadog-agent/embedded/bin/system-probe --config=/etc/datadog-agent/system-probe.yaml --pid=/opt/datadog-agent/run/system-probe.pid > dd-probe.out & -nohup /opt/datadog-agent/embedded/bin/process-agent --config=/etc/datadog-agent/datadog.yaml --sysprobe-config=/etc/datadog-agent/system-probe.yaml --pid=/opt/datadog-agent/run/process-agent.pid > dd-system-probe.out & - echo "Waiting for dependencies to properly start up - 240 seconds" date sleep 240 diff --git a/docker-compose-m1.yml b/docker-compose-m1.yml index 6d0f25a0f2b..7f8da3a3074 100644 --- a/docker-compose-m1.yml +++ b/docker-compose-m1.yml @@ -19,10 +19,9 @@ services: appeals-localstack-aws: platform: linux/amd64 container_name: localstack - image: localstack/localstack:0.11.4 + image: localstack/localstack:0.14.5 ports: - - "4567-4583:4567-4583" - - "8082:${PORT_WEB_UI-8080}" + - "4566:4566" environment: - SERVICES=sqs volumes: diff --git a/docker-compose.yml b/docker-compose.yml index 02ec69bc364..0ecd5d4fc06 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,10 +24,9 @@ services: appeals-localstack-aws: container_name: localstack - image: localstack/localstack:0.11.4 + image: localstack/localstack:0.14.5 ports: - - "4567-4583:4567-4583" - - "8082:${PORT_WEB_UI-8080}" + - "4566:4566" environment: - SERVICES=sqs volumes: diff --git a/lib/caseflow/error.rb b/lib/caseflow/error.rb index c60dc7bd5e9..69f46c0bdfb 100644 --- a/lib/caseflow/error.rb +++ b/lib/caseflow/error.rb @@ -510,4 +510,8 @@ def initialize(msg = "The batch size of jobs must not exceed 10") super(msg) end end + + class SqsUnexpectedQueueTypeError < StandardError; end + class SqsQueueNotFoundError < StandardError; end + class SqsQueueExhaustionError < StandardError; end end diff --git a/lib/fakes/va_notify_service.rb b/lib/fakes/va_notify_service.rb index 1dc5d00999a..e8b9cb98d7b 100644 --- a/lib/fakes/va_notify_service.rb +++ b/lib/fakes/va_notify_service.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true class Fakes::VANotifyService < ExternalApi::VANotifyService + VA_NOTIFY_ENDPOINT = "/api/v1/va_notify_update" + class << self # rubocop:disable Metrics/ParameterLists def send_email_notifications( @@ -11,7 +13,21 @@ def send_email_notifications( docket_number:, status: "" ) - fake_notification_response(email_template_id) + + external_id = SecureRandom.uuid + + unless Rails.deploy_env == :test + request = HTTPI::Request.new + request.url = "#{ENV['CASEFLOW_BASE_URL']}#{VA_NOTIFY_ENDPOINT}"\ + "?id=#{external_id}&status=delivered&to=test@example.com&notification_type=email" + request.headers["Content-Type"] = "application/json" + request.headers["Authorization"] = "Bearer test" + request.auth.ssl.ca_cert_file = ENV["SSL_CERT_FILE"] + + HTTPI.post(request) + end + + fake_notification_response(email_template_id, status, external_id) end def send_sms_notifications( @@ -22,11 +38,25 @@
docket_number:, status: "" ) + + external_id = SecureRandom.uuid + + unless Rails.deploy_env == :test + request = HTTPI::Request.new + request.url = "#{ENV['CASEFLOW_BASE_URL']}#{VA_NOTIFY_ENDPOINT}"\ + "?id=#{external_id}&status=delivered&to=+15555555555&notification_type=sms" + request.headers["Content-Type"] = "application/json" + request.headers["Authorization"] = "Bearer test" + request.auth.ssl.ca_cert_file = ENV["SSL_CERT_FILE"] + + HTTPI.post(request) + end + if participant_id.length.nil? return bad_participant_id_response end - fake_notification_response(sms_template_id) + fake_notification_response(sms_template_id, status, external_id) end # rubocop:enable Metrics/ParameterLists @@ -102,23 +132,23 @@ def bad_notification_response ) end - def fake_notification_response(email_template_id) + def fake_notification_response(template_id, status, external_id) HTTPI::Response.new( 200, {}, OpenStruct.new( - "id": SecureRandom.uuid, + "id": external_id, "reference": "string", "uri": "string", "template": { - "id" => email_template_id, + "id" => template_id, "version" => 0, "uri" => "string" }, "scheduled_for": "string", "content": { - "body" => "string", - "subject" => "string" + "body" => "Template: #{template_id} - Status: #{status}", + "subject" => "Test Subject" } ) ) diff --git a/lib/tasks/appeal_state_synchronizer.rake b/lib/tasks/appeal_state_synchronizer.rake index cd8e1f29149..ca1094a7c04 100644 --- a/lib/tasks/appeal_state_synchronizer.rake +++ b/lib/tasks/appeal_state_synchronizer.rake @@ -1,5 +1,7 @@ # frozen_string_literal: true +require "#{Rails.root}/app/helpers/sync_decided_appeals_helper.rb" + namespace :appeal_state_synchronizer do desc "Used to synchronize appeal_states table using data from other sources." task sync_appeal_states: :environment do @@ -11,6 +13,12 @@ namespace :appeal_state_synchronizer do backfill_appeal_information end + task sync_legacy_appeal_decisions: :environment do + include SyncDecidedAppealsHelper + + sync_decided_appeals + end + + def map_appeal_hearing_scheduled_state(appeal_state) if !appeal_state.appeal&.hearings&.empty? && appeal_state.appeal.hearings.max_by(&:scheduled_for).disposition.nil? return { hearing_scheduled: true } diff --git a/newrelic.yml b/newrelic.yml deleted file mode 100644 index c074e6089ed..00000000000 --- a/newrelic.yml +++ /dev/null @@ -1,49 +0,0 @@ -# -# Generated November 03, 2017 -# -# For full documentation of agent configuration options, please refer to -# https://docs.newrelic.com/docs/agents/ruby-agent/installation-configuration/ruby-agent-configuration -common: &default_settings - app_name: Caseflow - - # Logging level for log/newrelic_agent.log - log_level: info - - # Exception messages may have PII, so we won't send them. - # If we are sure that certain exceptions will not have PII, then we can whitelist them in this config file. - strip_exception_messages.enabled: true - - # To avoid sending PII, we explicitly deny all headers and parameters. We then whitelist known safe attributes. - attributes.exclude: [response.headers.*, request.headers.*, request.parameters.*] - attributes.include: [ - response.headers.contentType, - response.headers.contentLength, - request.headers.userAgent, - request.headers.accept, - request.headers.host, - request.headers.contentType, - ] - -# Environment-specific settings are in this section. -# RAILS_ENV or RACK_ENV (as appropriate) is used to determine the environment. -# If your application has other named environments, configure them here.
-development: - <<: *default_settings - app_name: Caseflow (Local Dev) - agent_enabled: false - -demo: - <<: *default_settings - app_name: Caseflow (Demo) - -test: - <<: *default_settings - # It doesn't make sense to report to New Relic from automated test runs. - monitor_mode: false - -staging: - <<: *default_settings - app_name: Caseflow (Staging) - -production: - <<: *default_settings diff --git a/spec/controllers/api/v1/va_notify_controller_spec.rb b/spec/controllers/api/v1/va_notify_controller_spec.rb index 895b7bd04f9..a3d01684b6b 100644 --- a/spec/controllers/api/v1/va_notify_controller_spec.rb +++ b/spec/controllers/api/v1/va_notify_controller_spec.rb @@ -4,9 +4,13 @@ include ActiveJob::TestHelper before { Seeds::NotificationEvents.new.seed! } + before(:each) { wipe_queues } + after(:all) { wipe_queues } + let(:sqs_client) { SqsService.sqs_client } let(:api_key) { ApiKey.create!(consumer_name: "API Consumer").key_string } let!(:appeal) { create(:appeal) } + let!(:queue) { create_queue("receive_notifications", true) } let!(:notification_email) do create( :notification, @@ -27,7 +31,7 @@ appeals_type: "Appeal", event_date: "2023-02-27 13:11:51.91467", event_type: Constants.EVENT_TYPE_FILTERS.quarterly_notification, - notification_type: "Email", + notification_type: "Sms", notified_at: "2023-02-28 14:11:51.91467", sms_notification_external_id: "3fa85f64-5717-4562-b3fc-2c963f66afa6", sms_notification_status: "Preferences Declined" @@ -36,6 +40,66 @@ let(:default_payload) do { id: "3fa85f64-5717-4562-b3fc-2c963f66afa6", + to: "to", + status_reason: "status_reason", + body: "string", + completed_at: "2023-04-17T12:38:48.699Z", + created_at: "2023-04-17T12:38:48.699Z", + created_by_name: "string", + email_address: "user@example.com", + line_1: "string", + line_2: "string", + line_3: "string", + line_4: "string", + line_5: "string", + line_6: "string", + phone_number: "+16502532222", + postage: "string", + postcode: "string", + reference: "string", + scheduled_for: "2023-04-17T12:38:48.699Z", + sent_at: "2023-04-17T12:38:48.699Z", + sent_by: "string", + status: "created", + subject: "string", + notification_type: "Email" + } + end + + let(:error_payload1) do + { + id: "3fa85f64-5717-4562-b3fc-2c963f66afa6", + to: "to", + status_reason: nil, + body: "string", + completed_at: "2023-04-17T12:38:48.699Z", + created_at: "2023-04-17T12:38:48.699Z", + created_by_name: "string", + email_address: "user@example.com", + line_1: "string", + line_2: "string", + line_3: "string", + line_4: "string", + line_5: "string", + line_6: "string", + phone_number: "+16502532222", + postage: "string", + postcode: "string", + reference: "string", + scheduled_for: "2023-04-17T12:38:48.699Z", + sent_at: "2023-04-17T12:38:48.699Z", + sent_by: "string", + status: "created", + subject: "string", + notification_type: "Email" + } + end + + let(:error_payload2) do + { + id: nil, + to: "to", + status_reason: "status_reason", body: "string", completed_at: "2023-04-17T12:38:48.699Z", created_at: "2023-04-17T12:38:48.699Z", @@ -56,11 +120,13 @@ sent_by: "string", status: "created", subject: "string", - notification_type: "" + notification_type: "Emailx" } end context "email notification status is changed" do + before { Seeds::NotificationEvents.new.seed! 
} + let(:payload_email) do default_payload.deep_dup.tap do |payload| payload[:notification_type] = "email" @@ -72,7 +138,6 @@ post :notifications_update, params: payload_email perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - expect(notification_email.reload.email_notification_status).to eq("created") end end @@ -89,7 +154,6 @@ post :notifications_update, params: payload_sms perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - expect(notification_sms.reload.sms_notification_status).to eq("created") end end @@ -98,6 +162,8 @@ let(:payload_fake) do { id: "fake", + to: "to", + status_reason: "status_reason", body: "string", completed_at: "2023-04-17T12:38:48.699Z", created_at: "2023-04-17T12:38:48.699Z", @@ -133,16 +199,156 @@ } end - it "Update job raises error if UUID is passed in for a non-existant notification" do - expect_any_instance_of(ProcessNotificationStatusUpdatesJob).to receive(:log_error) do |_job, error| - expect(error.message).to eq("No notification matches UUID #{payload_fake.dig(:id)}") + it "Update job runs cleanly when UUID is missing" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload_fake + expect(response.status).to eq(200) + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + end + end + + context "payload missing required params" do + before { Seeds::NotificationEvents.new.seed! } + + let(:payload_email) do + error_payload1.deep_dup.tap do |payload| + payload[:notification_type] = "email" end + end + it "is missing the id and properly errors out" do request.headers["Authorization"] = "Bearer #{api_key}" - post :notifications_update, params: payload_fake + post :notifications_update, params: payload_email + + expect(response.status).to eq(200) + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + end + end + + context "payload status is delivered and status_reason and to are null" do + before { Seeds::NotificationEvents.new.seed! } + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "delivered" + payload[:status_reason] = nil + payload[:to] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + expect(response.status).to eq(200) + end + end + + context "payload status is delivered and status_reason is null" do + before { Seeds::NotificationEvents.new.seed! } + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "delivered" + payload[:status_reason] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + expect(response.status).to eq(200) + end + end + + context "payload status is delivered and to is null" do + before { Seeds::NotificationEvents.new.seed! 
} + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "delivered" + payload[:to] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + expect(response.status).to eq(200) + end + end + + context "payload status is NOT delivered and status reason and to are null" do + before { Seeds::NotificationEvents.new.seed! } + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "Pending Delivery" + payload[:to] = nil + payload[:status_reason] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } expect(response.status).to eq(200) + end + end + + context "payload status is NOT delivered and status reason is null" do + before { Seeds::NotificationEvents.new.seed! } + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "Pending Delivery" + payload[:status_reason] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + expect(response.status).to eq(200) end end + + context "payload status is NOT delivered and to is null" do + before { Seeds::NotificationEvents.new.seed! } + let(:payload) do + error_payload1.deep_dup.tap do |payload| + payload[:status] = "Pending Delivery" + payload[:to] = nil + end + end + + it "updates status of notification" do + request.headers["Authorization"] = "Bearer #{api_key}" + post :notifications_update, params: payload + + perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } + expect(response.status).to eq(200) + end + end + + def create_queue(name, fifo = false) + sqs_client.create_queue({ + queue_name: "caseflow_test_#{name}#{fifo ? '.fifo' : ''}".to_sym, + attributes: fifo ? 
{ "FifoQueue" => "true" } : {} + }) + end + + def wipe_queues + client = SqsService.sqs_client + + queues_to_delete = client.list_queues.queue_urls.filter { |url| url.include?("caseflow_test") } + + queues_to_delete.each { |queue_url| client.delete_queue(queue_url: queue_url) } + end end diff --git a/spec/controllers/case_distribution_levers_controller_spec.rb b/spec/controllers/case_distribution_levers_controller_spec.rb index 9ab8fc26579..e4faa69a6e4 100644 --- a/spec/controllers/case_distribution_levers_controller_spec.rb +++ b/spec/controllers/case_distribution_levers_controller_spec.rb @@ -133,7 +133,7 @@ end it "renders a page with the grouped levers and lever history" do - lever_keys = %w[static batch affinity docket_distribution_prior docket_time_goal docket_levers] + lever_keys = %w[static batch affinity docket_distribution_prior docket_time_goal docket_levers internal] User.authenticate!(user: lever_user) OrganizationsUser.make_user_admin(lever_user, CDAControlGroup.singleton) get "levers" diff --git a/spec/controllers/idt/api/v2/appeals_controller_spec.rb b/spec/controllers/idt/api/v2/appeals_controller_spec.rb index c74497409af..7135ba24eaf 100644 --- a/spec/controllers/idt/api/v2/appeals_controller_spec.rb +++ b/spec/controllers/idt/api/v2/appeals_controller_spec.rb @@ -585,6 +585,37 @@ expect(JSON.parse(response.body)["message"]).to eq("Successful dispatch!") end + context "when notifications are enabled" do + include ActiveJob::TestHelper + let(:veteran) { create(:veteran) } + let(:contested_appeal) do + create( + :legacy_appeal, + vacols_case: create(:case, bfcorlid: veteran.file_number), + vbms_id: "#{veteran.file_number}S" + ) + end + before do + FeatureToggle.enable!(:va_notify_sms) + FeatureToggle.enable!(:va_notify_email) + Seeds::NotificationEvents.new.seed! 
unless NotificationEvent.count > 0 + end + + it "should send the appeal decision mailed non contested claim notification" do + perform_enqueued_jobs { post :outcode, params: params, as: :json } + + expect(Notification.last.event_type).to eq("Appeal decision mailed (Non-contested claims)") + end + + it "should send the appeal decision mailed contested claim notification" do + VACOLS::Representative.create!(repkey: contested_appeal.vacols_id, reptype: "C") + params[:appeal_id] = contested_appeal.vacols_id + + perform_enqueued_jobs { post :outcode, params: params, as: :json } + expect(Notification.last.event_type).to eq("Appeal decision mailed (Contested claims)") + end + end + context "when dispatch is associated with a mail request" do include ActiveJob::TestHelper diff --git a/spec/factories/case_distribution_lever.rb b/spec/factories/case_distribution_lever.rb index 30789851b51..442dc8b23c9 100644 --- a/spec/factories/case_distribution_lever.rb +++ b/spec/factories/case_distribution_lever.rb @@ -500,5 +500,16 @@ lever_group_order { 103 } control_group { "priority" } end + + trait :nonsscavlj_number_of_appeals_to_move do + item { "nonsscavlj_number_of_appeals_to_move" } + title { "Non-SSC/AVLJ Number of Appeals to Move" } + data_type { "number" } + value { 2 } + unit { "" } + algorithms_used { [] } + lever_group { "internal" } + lever_group_order { 999 } + end end end diff --git a/spec/factories/returned_appeal_job.rb b/spec/factories/returned_appeal_job.rb new file mode 100644 index 00000000000..ac17c3f28d8 --- /dev/null +++ b/spec/factories/returned_appeal_job.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :returned_appeal_job do + started_at { Time.zone.now } + completed_at { Time.zone.now + 1.hour } + errored_at { nil } + stats { { success: true, message: "Job completed successfully" }.to_json } + returned_appeals { [] } + end +end diff --git a/spec/factories/user.rb b/spec/factories/user.rb index bfb5ebe7d85..27263655b0e 100644 --- a/spec/factories/user.rb +++ b/spec/factories/user.rb @@ -181,6 +181,30 @@ end end + trait :non_ssc_avlj_user do + after(:create) do |user| + create(:staff, :non_ssc_avlj, user: user) + end + end + + trait :ssc_avlj_user do + after(:create) do |user| + create(:staff, :ssc_avlj, user: user) + end + end + + trait :vlj_user do + after(:create) do |user| + create(:staff, :vlj, user: user) + end + end + + trait :inactive_non_ssc_avlj_user do + after(:create) do |user| + create(:staff, :inactive_non_ssc_avlj, user: user) + end + end + after(:create) do |user, evaluator| if evaluator.vacols_uniq_id create(:staff, slogid: evaluator.vacols_uniq_id, user: user) diff --git a/spec/factories/vacols/case.rb b/spec/factories/vacols/case.rb index e26209dca6b..1f06c8e1b87 100644 --- a/spec/factories/vacols/case.rb +++ b/spec/factories/vacols/case.rb @@ -197,6 +197,114 @@ end end + # The judge and attorney should be the VACOLS::Staff records of those users + # This factory uses the :aod trait to mark it AOD instead of a transient attribute + # Pass `tied_to: false` to create an original appeal without a previous hearing + factory :legacy_signed_appeal do + transient do + judge { nil } + signing_avlj { nil } + assigned_avlj { nil } + attorney { nil } + cavc { false } + appeal_affinity { true } + affinity_start_date { 2.months.ago } + tied_to { true } + end + + status_active + + bfdpdcn { 1.month.ago } + bfcurloc { "81" } + + after(:create) do |new_case, evaluator| + signing_judge = + if evaluator.signing_avlj.present? 
+ VACOLS::Staff.find_by_sdomainid(evaluator.signing_avlj.css_id) + else + evaluator.judge || create(:user, :judge, :with_vacols_judge_record).vacols_staff + end + + hearing_judge = + if evaluator.assigned_avlj.present? + VACOLS::Staff.find_by_sdomainid(evaluator.assigned_avlj.css_id) + else + evaluator.judge || create(:user, :judge, :with_vacols_judge_record).vacols_staff + end + + signing_sattyid = signing_judge.sattyid + + original_attorney = evaluator.attorney || create(:user, :with_vacols_attorney_record).vacols_staff + + new_case.correspondent.update!(ssn: new_case.bfcorlid.chomp("S")) unless new_case.correspondent.ssn + + veteran = Veteran.find_by_file_number_or_ssn(new_case.correspondent.ssn) + + if veteran + new_case.correspondent.update!(snamef: veteran.first_name, snamel: veteran.last_name) + else + create( + :veteran, + first_name: new_case.correspondent.snamef, + last_name: new_case.correspondent.snamel, + name_suffix: new_case.correspondent.ssalut, + ssn: new_case.correspondent.ssn, + file_number: new_case.correspondent.ssn + ) + end + + # Build these instead of create so the folder after_create hooks don't execute and create another case + # until the original case has been created and the associations saved + original_folder = build( + :folder, + new_case.folder.attributes.except!("ticknum", "tidrecv", "tidcls", "tiaduser", + "tiadtime", "tikeywrd", "tiread2", "tioctime", "tiocuser", + "tidktime", "tidkuser") + ) + + original_issues = new_case.case_issues.map do |issue| + build( + :case_issue, + issue.attributes.except("isskey", "issaduser", "issadtime", "issmduser", "issmdtime", "issdcls"), + issdc: "3" + ) + end + + original_case = create( + :case, + :status_complete, + :disposition_remanded, + bfac: evaluator.cavc ? "7" : "1", + bfcorkey: new_case.bfcorkey, + bfcorlid: new_case.bfcorlid, + bfdnod: new_case.bfdnod, + bfdsoc: new_case.bfdsoc, + bfd19: new_case.bfd19, + bfcurloc: "99", + bfddec: new_case.bfdpdcn, + bfmemid: signing_sattyid, + bfattid: original_attorney.sattyid, + folder: original_folder, + correspondent: new_case.correspondent, + case_issues: original_issues + ) + + if evaluator.tied_to + create( + :case_hearing, + :disposition_held, + folder_nr: original_case.bfkey, + hearing_date: original_case.bfddec - 1.month, + user: User.find_by_css_id(hearing_judge&.sdomainid) + ) + end + + if evaluator.appeal_affinity + create(:appeal_affinity, appeal: new_case, affinity_start_date: evaluator.affinity_start_date) + end + end + end + # You can change the judge, attorney, AOD status, and Appeal Affinity of your Legacy CAVC Appeal. # The Appeal_Affinity is default but the AOD must be toggled on. 
Example: # "FactoryBot.create(:legacy_cavc_appeal, judge: judge, aod: true, affinity_start_date: 2.weeks.ago)" diff --git a/spec/factories/vacols/staff.rb b/spec/factories/vacols/staff.rb index 77519c47a42..b97ad00b73c 100644 --- a/spec/factories/vacols/staff.rb +++ b/spec/factories/vacols/staff.rb @@ -12,6 +12,16 @@ new_sattyid end + + judge do + judge_staff = VACOLS::Staff.find_by(slogid: "STAFF_FCT_JUDGE") || + create(:staff, :judge_role, slogid: "STAFF_FCT_JUDGE") + judge_staff + end + + generated_smemgrp_not_equal_to_sattyid do + judge.sattyid + end end sequence(:stafkey) do |n| @@ -117,6 +127,31 @@ sattyid { generated_sattyid } end + trait :non_ssc_avlj do + svlj { "A" } + sattyid { generated_sattyid } + smemgrp { generated_smemgrp_not_equal_to_sattyid } + end + + trait :inactive_non_ssc_avlj do + svlj { "A" } + sactive { "I" } + sattyid { generated_sattyid } + smemgrp { generated_smemgrp_not_equal_to_sattyid } + end + + trait :ssc_avlj do + svlj { "A" } + sattyid { generated_sattyid } + smemgrp { sattyid } + end + + trait :vlj do + svlj { "J" } + sattyid { generated_sattyid } + smemgrp { sattyid } + end + after(:build) do |staff, evaluator| if evaluator.user&.full_name staff.snamef = evaluator.user.full_name.split(" ").first diff --git a/spec/helpers/sync_decided_appeals_helper_spec.rb b/spec/helpers/sync_decided_appeals_helper_spec.rb new file mode 100644 index 00000000000..b55dbe97ae7 --- /dev/null +++ b/spec/helpers/sync_decided_appeals_helper_spec.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +require_relative "../../app/helpers/sync_decided_appeals_helper" + +describe "SyncDecidedAppealsHelper" do + self.use_transactional_tests = false + + class Helper + include SyncDecidedAppealsHelper + end + + attr_reader :helper + + subject do + Helper.new + end + + context "#sync_decided_appeals" do + let(:decided_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(true, true) + end + + let(:undecided_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(false, true) + end + + let(:missing_vacols_case_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(true, false) + end + + it "Job syncs decided appeals decision_mailed status", bypass_cleaner: true do + expect([decided_appeal_state, + undecided_appeal_state, + missing_vacols_case_appeal_state].all?(&:decision_mailed)).to eq false + + subject.sync_decided_appeals + + expect(decided_appeal_state.reload.decision_mailed).to eq true + expect(undecided_appeal_state.reload.decision_mailed).to eq false + expect(missing_vacols_case_appeal_state.reload.decision_mailed).to eq false + end + + it "catches standard errors", bypass_cleaner: true do + expect([decided_appeal_state, + undecided_appeal_state, + missing_vacols_case_appeal_state].all?(&:decision_mailed)).to eq false + + error_text = "Fatal error in sync_decided_appeals_helper" + allow(AppealState).to receive(:legacy).and_raise(StandardError.new(error_text)) + + expect(Rails.logger).to receive(:error) + + expect { subject.sync_decided_appeals }.to raise_error(StandardError) + end + + # Clean up parallel threads + after(:each) { clean_up_after_threads } + + # VACOLS record's decision date will be set to simulate a decided appeal + # decision_mailed will be set to false for the AppealState to verify the method + # functionality + def create_decided_appeal_state_with_case_record_and_hearing(decided_appeal, create_case) + case_hearing = create(:case_hearing) + decision_date = decided_appeal ? 
Time.current : nil + vacols_case = create_case ? create(:case, case_hearings: [case_hearing], bfddec: decision_date) : nil + appeal = create(:legacy_appeal, vacols_case: vacols_case) + + appeal.appeal_state.tap { _1.update!(decision_mailed: false) } + end + + def clean_up_after_threads + DatabaseCleaner.clean_with(:truncation, except: %w[vftypes issref notification_events]) + end + end +end diff --git a/spec/jobs/hearings/receive_notification_job_spec.rb b/spec/jobs/hearings/receive_notification_job_spec.rb deleted file mode 100644 index ab8a326a4ea..00000000000 --- a/spec/jobs/hearings/receive_notification_job_spec.rb +++ /dev/null @@ -1,182 +0,0 @@ -# frozen_string_literal: true - -# Testing plan: -# - 1. Create test records usiong factories and take note of notification ID and specific fields to compare -# - 2. Use custom message defined here to pass in perform method -# - 3. Test perform method by checking if field values in DB recored are equal to the field values in the message, -# - An update to record should only be called whenever there are differences between the message and the record in DB -# - 4. The updated record should be returned - -describe ReceiveNotificationJob, type: :job do - include ActiveJob::TestHelper - let(:current_user) { create(:user, roles: ["System Admin"]) } - # rubocop:disable Style/BlockDelimiters - let(:message) { - { - queue_url: "http://example_queue", - message_body: "Notification", - message_attributes: { - "id": { - data_type: "String", - string_value: "3fa85f64-5717-4562-b3fc-2c963f66afa6" - }, - "body": { - data_type: "String", - string_value: "AString" - }, - "created_at": { - data_type: "String", - string_value: "2022-09-02T20:40:11.184Z" - }, - "completed_at": { - data_type: "String", - string_value: "2022-09-02T20:40:11.184Z" - }, - "created_by_name": { - data_type: "String", - string_value: "John" - }, - "email_address": { - data_type: "String", - string_value: "user@example.com" - }, - "line_1": { - data_type: "String", - string_value: "address" - }, - "line_2": { - data_type: "String", - string_value: "address" - }, - "line_3": { - data_type: "String", - string_value: "address" - }, - "line_4": { - data_type: "String", - string_value: "address" - }, - "line_5": { - data_type: "String", - string_value: "address" - }, - "line_6": { - data_type: "String", - string_value: "address" - }, - "phone_number": { - data_type: "String", - string_value: nil - }, - "postage": { - data_type: "String", - string_value: "postage" - }, - "postcode": { - data_type: "String", - string_value: "postcode" - }, - "reference": { - data_type: "String", - string_value: "9" - }, - "scheduled_for": { - data_type: "String", - string_value: "2022-09-02T20:40:11.184Z" - }, - "sent_at": { - data_type: "String", - string_value: "2022-09-02T20:40:11.184Z" - }, - "sent_by": { - data_type: "String", - string_value: "sent-by" - }, - "status": { - data_type: "String", - string_value: "delivered" - }, - "subject": { - data_type: "String", - string_value: "subject" - }, - "type": { - string_value: "email", - data_type: "String" - } - - } - } - } - - # rubocop:enable Style/BlockDelimiters - let(:queue_name) { "caseflow_test_receive_notifications" } - - after do - clear_enqueued_jobs - clear_performed_jobs - end - - it "it is the correct queue" do - expect(ReceiveNotificationJob.new.queue_name).to eq(queue_name) - end - - context ".perform" do - # create notification event record - let(:hearing_scheduled_event) do - NotificationEvent.find_or_create_by(event_type: 
Constants.EVENT_TYPE_FILTERS.hearing_scheduled) do |event| - event.email_template_id = "27bf814b-f065-4fc8-89af-ae1292db894e" - event.sms_template_id = "c2798da3-4c7a-43ed-bc16-599329eaf7cc" - end - end - # create notification record - let(:notification) do - create(:notification, id: 9, appeals_id: 4, appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - participant_id: "123456789", notification_type: "Email", recipient_email: "", - event_date: Time.zone.now, email_notification_status: "Success") - end - - # add message to queue - subject(:job) { ReceiveNotificationJob.perform_later(message) } - - # make sure job count increases by 1 - describe "send message to queue" do - it "has one message in queue" do - expect { job }.to change(ActiveJob::Base.queue_adapter.enqueued_jobs, :size).by(1) - end - - # After receiving the notification (by notification id), check : if email is same, if number is still nil, - # if status changed form Success to delivered - it "updates notification" do - hearing_scheduled_event - notification - - # obtain record from compare_notification_audit_record function - record = ReceiveNotificationJob.perform_now(message) - - # run checks - expect(record.recipient_email).to eq(message[:message_attributes][:email_address][:string_value]) - expect(record.recipient_phone_number).to eq(nil) - expect(record.email_notification_status).to eq(message[:message_attributes][:status][:string_value].capitalize) - end - end - - describe "errors" do - it "logs error when message is nil" do - expect(Rails.logger).to receive(:error).with(/There was no message passed/) - perform_enqueued_jobs do - ReceiveNotificationJob.perform_later(nil) - end - end - - it "logs error when message_attributes is nil" do - message[:message_attributes] = nil - expect(Rails.logger).to receive(:error).with(/message_attributes was nil/) - perform_enqueued_jobs do - ReceiveNotificationJob.perform_later(message) - end - end - end - end -end diff --git a/spec/jobs/nightly_syncs_job_spec.rb b/spec/jobs/nightly_syncs_job_spec.rb index 700c5672e5b..2af498ffbfb 100644 --- a/spec/jobs/nightly_syncs_job_spec.rb +++ b/spec/jobs/nightly_syncs_job_spec.rb @@ -201,6 +201,23 @@ class FakeTask < Dispatch::Task expect(held_hearing_appeal_state.reload.hearing_scheduled).to eq false end + it "catches standard errors" do + expect([pending_hearing_appeal_state, + postponed_hearing_appeal_state, + withdrawn_hearing_appeal_state, + scheduled_in_error_hearing_appeal_state, + held_hearing_appeal_state].all?(&:hearing_scheduled)).to eq true + + allow(AppealState).to receive(:where).and_raise(StandardError) + slack_msg = "" + slack_msg_error_text = "Fatal error in sync_hearing_states" + allow_any_instance_of(SlackService).to receive(:send_notification) { |_, first_arg| slack_msg = first_arg } + + subject + + expect(slack_msg.include?(slack_msg_error_text)).to be true + end + # Hearing scheduled will be set to true to simulate Caseflow missing a # disposition update. 
def create_appeal_state_with_case_record_and_hearing(desired_disposition) @@ -213,6 +230,66 @@ def create_appeal_state_with_case_record_and_hearing(desired_disposition) end end + context "#sync_decided_appeals" do + let(:decided_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(true, true) + end + + let(:undecided_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(false, true) + end + + let(:missing_vacols_case_appeal_state) do + create_decided_appeal_state_with_case_record_and_hearing(true, false) + end + + it "Job syncs decided appeals decision_mailed status", bypass_cleaner: true do + expect([decided_appeal_state, + undecided_appeal_state, + missing_vacols_case_appeal_state].all?(&:decision_mailed)).to eq false + + subject + + expect(decided_appeal_state.reload.decision_mailed).to eq true + expect(undecided_appeal_state.reload.decision_mailed).to eq false + expect(missing_vacols_case_appeal_state.reload.decision_mailed).to eq false + end + + it "catches standard errors", bypass_cleaner: true do + expect([decided_appeal_state, + undecided_appeal_state, + missing_vacols_case_appeal_state].all?(&:decision_mailed)).to eq false + + allow(AppealState).to receive(:legacy).and_raise(StandardError) + slack_msg = "" + slack_msg_error_text = "Fatal error in sync_decided_appeals" + allow_any_instance_of(SlackService).to receive(:send_notification) { |_, first_arg| slack_msg = first_arg } + + subject + + expect(slack_msg.include?(slack_msg_error_text)).to be true + end + + # Clean up parallel threads + after(:each) { clean_up_after_threads } + + # VACOLS record's decision date will be set to simulate a decided appeal + # decision_mailed will be set to false for the AppealState to verify the method + # functionality + def create_decided_appeal_state_with_case_record_and_hearing(decided_appeal, create_case) + case_hearing = create(:case_hearing) + decision_date = decided_appeal ? Time.current : nil + vacols_case = create_case ? create(:case, case_hearings: [case_hearing], bfddec: decision_date) : nil + appeal = create(:legacy_appeal, vacols_case: vacols_case) + + appeal.appeal_state.tap { _1.update!(decision_mailed: false) } + end + + def clean_up_after_threads + DatabaseCleaner.clean_with(:truncation, except: %w[vftypes issref notification_events]) + end + end + context "when errors occur" do context "in the sync_vacols_cases step" do context "due to existing FK associations" do diff --git a/spec/jobs/notification_initialization_job_spec.rb b/spec/jobs/notification_initialization_job_spec.rb index 3b86fb7e74d..bf1e2d3fb15 100644 --- a/spec/jobs/notification_initialization_job_spec.rb +++ b/spec/jobs/notification_initialization_job_spec.rb @@ -49,6 +49,10 @@ ) end + before do + InitialTasksFactory.new(appeal_state.appeal).create_root_and_sub_tasks! 
+ end + it "enqueues an SendNotificationJob" do expect { subject }.to have_enqueued_job(SendNotificationJob) end diff --git a/spec/jobs/process_decision_document_job_spec.rb b/spec/jobs/process_decision_document_job_spec.rb index f7295efe73a..ca8210c3ed5 100644 --- a/spec/jobs/process_decision_document_job_spec.rb +++ b/spec/jobs/process_decision_document_job_spec.rb @@ -1,8 +1,10 @@ # frozen_string_literal: true describe ProcessDecisionDocumentJob do + let(:contested) { true } + context ".perform" do - subject { ProcessDecisionDocumentJob.perform_now(decision_document.id) } + subject { ProcessDecisionDocumentJob.perform_now(decision_document.id, contested) } let(:decision_document) { build_stubbed(:decision_document) } diff --git a/spec/jobs/process_notification_status_updates_job_spec.rb b/spec/jobs/process_notification_status_updates_job_spec.rb index a5f94379961..e40699a0e6f 100644 --- a/spec/jobs/process_notification_status_updates_job_spec.rb +++ b/spec/jobs/process_notification_status_updates_job_spec.rb @@ -3,107 +3,163 @@ describe ProcessNotificationStatusUpdatesJob, type: :job do include ActiveJob::TestHelper - let(:redis) do - # Creates a fresh Redis connection before each test and deletes all keys in the store - Redis.new(url: Rails.application.secrets.redis_url_cache).tap(&:flushall) - end + before(:each) { wipe_queues } + after(:all) { wipe_queues } + + let(:sqs_client) { SqsService.sqs_client } context ".perform" do before { Seeds::NotificationEvents.new.seed! } subject(:job) { ProcessNotificationStatusUpdatesJob.perform_later } - let(:new_status) { "test_status" } let(:appeal) { create(:appeal, veteran_file_number: "500000102", receipt_date: 6.months.ago.to_date.mdY) } + + let(:email_external_id) { SecureRandom.uuid } let(:email_notification) do create(:notification, appeals_id: appeal.uuid, appeals_type: "Appeal", event_date: 6.days.ago, event_type: Constants.EVENT_TYPE_FILTERS.quarterly_notification, notification_type: "Email", - email_notification_external_id: SecureRandom.uuid) + email_notification_external_id: email_external_id) end + + let(:sms_external_id) { SecureRandom.uuid } let(:sms_notification) do create(:notification, appeals_id: appeal.uuid, appeals_type: "Appeal", event_date: 6.days.ago, event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - sms_notification_external_id: SecureRandom.uuid, + sms_notification_external_id: sms_external_id, notification_type: "SMS") end - it "has one message in queue" do - expect { job }.to change(ActiveJob::Base.queue_adapter.enqueued_jobs, :size).by(1) - end - - it "processes email notifications from redis cache" do - expect(email_notification.email_notification_status).to_not eq(new_status) - - create_cache_entries(email_notification) - - expect(redis.keys.grep(/email_update:/).count).to eq(1) - - perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - - expect(redis.keys.grep(/email_update:/).count).to eq(0) - expect(email_notification.reload.email_notification_status).to eq(new_status) + let(:sms_notification_2) do + create(:notification, appeals_id: appeal.uuid, + appeals_type: "Appeal", + event_date: 6.days.ago, + event_type: Constants.EVENT_TYPE_FILTERS.postponement_of_hearing, + sms_notification_external_id: "1234", + notification_type: "SMS") end - it "processes sms notifications from redis cache" do - expect(sms_notification.sms_notification_status).to_not eq(new_status) - - create_cache_entries(sms_notification) - - expect(redis.keys.grep(/sms_update:/).count).to eq(1) - - 
perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - - expect(redis.keys.grep(/sms_update:/).count).to eq(0) - expect(sms_notification.reload.sms_notification_status).to eq(new_status) + it "has one message in queue" do + expect { job }.to change(ActiveJob::Base.queue_adapter.enqueued_jobs, :size).by(1) end - it "processes a mix of email and sms notifications from redis cache" do - create_cache_entries(sms_notification, email_notification) - - expect(redis.keys.grep(/(sms|email)_update:/).count).to eq(2) - - perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - - expect(redis.keys.grep(/(sms|email)_update:/).count).to eq(0) - expect(email_notification.reload.email_notification_status).to eq(new_status) - expect(sms_notification.reload.sms_notification_status).to eq(new_status) + context "Updates are pulled from the SQS queue and applied to the database" do + let(:recipient_email) { "test@test.com" } + let(:email_status) { "delivered" } + let(:email_status_reason) { "Email delivery was successful" } + + let(:recipient_phone) { "123-456-7890" } + let(:sms_status) { "temporary-failure" } + let(:sms_status_reason) { "Provider is retrying." } + + let(:test_queue) do + sqs_client.create_queue({ + queue_name: "caseflow_test_receive_notifications.fifo".to_sym, + attributes: { + "FifoQueue" => "true" + } + }) + end + let(:queue_url) { test_queue.queue_url } + let!(:sms_sqs_message) do + sqs_client.send_message( + queue_url: queue_url, + message_body: { + notification_type: "sms", + external_id: sms_external_id, + status: sms_status, + status_reason: sms_status_reason, + recipient: recipient_phone + }.to_json, + message_deduplication_id: "1", + message_group_id: ProcessNotificationStatusUpdatesJob::MESSAGE_GROUP_ID + ) + end + + let!(:sms_sqs_message_wrong_group_id) do + sqs_client.send_message( + queue_url: queue_url, + message_body: { + notification_type: "sms", + external_id: "1234", + status: sms_status, + status_reason: sms_status_reason, + recipient: recipient_phone + }.to_json, + message_deduplication_id: "2", + message_group_id: "SomethingElse" + ) + end + + let!(:email_sqs_message) do + sqs_client.send_message( + queue_url: queue_url, + message_body: { + notification_type: "email", + external_id: email_external_id, + status: email_status, + status_reason: email_status_reason, + recipient: recipient_email + }.to_json, + message_deduplication_id: "3", + message_group_id: ProcessNotificationStatusUpdatesJob::MESSAGE_GROUP_ID + ) + end + + it "Status update info from messages with correct group ID is persisted correctly" do + expect(all_message_info_empty?).to eq true + + perform_enqueued_jobs { job } + + # Reload records + [email_notification, sms_notification, sms_notification_2].each(&:reload) + + expect(email_notification.email_notification_status).to eq email_status + expect(email_notification.email_status_reason).to eq email_status_reason + expect(email_notification.recipient_email).to eq recipient_email + + expect(sms_notification.sms_notification_status).to eq sms_status + expect(sms_notification.sms_status_reason).to eq sms_status_reason + expect(sms_notification.recipient_phone_number).to eq recipient_phone + + # Update with the wrong message_group_id should have been skipped.
+ expect([ + sms_notification_2.sms_notification_status, + sms_notification_2.sms_status_reason, + sms_notification_2.recipient_phone_number + ].all?(&:nil?)).to eq true + end end + end - it "an error is raised if a UUID doesn't match with a notification record, but the job isn't halted" do - expect_any_instance_of(ProcessNotificationStatusUpdatesJob).to receive(:log_error) do |_job, error| - expect(error.message).to eq("No notification matches UUID not-going-to-match") - end.exactly(:once) - - # This notification update will cause an error - redis.set("sms_update:not-going-to-match:#{new_status}", 0) - - # This notification update should be fine - create_cache_entries(email_notification) - - expect(redis.keys.grep(/(sms|email)_update:/).count).to eq(2) - - perform_enqueued_jobs { ProcessNotificationStatusUpdatesJob.perform_later } - - expect(sms_notification.reload.sms_notification_status).to be_nil - expect(email_notification.reload.email_notification_status).to eq(new_status) - - expect(redis.keys.grep(/(sms|email)_update:/).count).to eq(0) - end + def all_message_info_empty? + [ + email_notification.email_notification_status, + email_notification.email_status_reason, + email_notification.recipient_email + ].all?(&:nil?) && + [ + sms_notification.sms_notification_status, + sms_notification.sms_status_reason, + sms_notification.recipient_phone_number + ].all?(&:nil?) && + [ + sms_notification_2.sms_notification_status, + sms_notification_2.sms_status_reason, + sms_notification_2.recipient_phone_number + ].all?(&:nil?) end - private + def wipe_queues + client = SqsService.sqs_client - def create_cache_entries(*keys) - keys.each do |key| - notification_type = key.notification_type.downcase - external_id = key.send("#{notification_type}_notification_external_id".to_sym) + queues_to_delete = client.list_queues.queue_urls.filter { |url| url.include?("caseflow_test") } - redis.set("#{notification_type}_update:#{external_id}:#{new_status}", 0) - end + queues_to_delete.each { |queue_url| client.delete_queue(queue_url: queue_url) } end end diff --git a/spec/jobs/push_priority_appeals_to_judges_job_spec.rb b/spec/jobs/push_priority_appeals_to_judges_job_spec.rb index 4ff0b7d9df8..3c2602cb8a8 100644 --- a/spec/jobs/push_priority_appeals_to_judges_job_spec.rb +++ b/spec/jobs/push_priority_appeals_to_judges_job_spec.rb @@ -14,6 +14,7 @@ create(:case_distribution_lever, :ama_hearing_case_aod_affinity_days) create(:case_distribution_lever, :ama_direct_review_start_distribution_prior_to_goals) create(:case_distribution_lever, :disable_legacy_non_priority) + create(:case_distribution_lever, :nonsscavlj_number_of_appeals_to_move) end def to_judge_hash(arr) @@ -24,8 +25,6 @@ def to_judge_hash(arr) before do expect_any_instance_of(PushPriorityAppealsToJudgesJob) .to receive(:distribute_genpop_priority_appeals).and_return([]) - expect_any_instance_of(PushPriorityAppealsToJudgesJob) - .to receive(:generate_report).and_return([]) end after { FeatureToggle.disable!(:acd_distribute_by_docket_date) } @@ -52,6 +51,13 @@ def to_judge_hash(arr) subject end + + it "calls send_job_report method" do + expect_any_instance_of(PushPriorityAppealsToJudgesJob) + .to receive(:generate_report).and_return([]) + + subject + end end context ".distribute_non_genpop_priority_appeals" do @@ -220,29 +226,6 @@ def to_judge_hash(arr) subject { PushPriorityAppealsToJudgesJob.new.distribute_non_genpop_priority_appeals } - context "using Automatic Case Distribution module" do - before do - create(:case_distribution_lever, 
:disable_legacy_priority) - allow_any_instance_of(PushPriorityAppealsToJudgesJob).to receive(:eligible_judges).and_return(eligible_judges) - end - - xit "should only distribute the ready priority cases tied to a judge" do - expect(subject.count).to eq eligible_judges.count - expect(subject.map { |dist| dist.statistics["batch_size"] }).to match_array [2, 2, 0, 0] - - # Ensure we only distributed the 2 ready legacy and hearing priority cases that are tied to a judge - distributed_cases = DistributedCase.where(distribution: subject) - expect(distributed_cases.count).to eq 4 - expected_array = [ready_priority_bfkey, ready_priority_bfkey2, ready_priority_uuid, ready_priority_uuid2] - expect(distributed_cases.map(&:case_id)).to match_array expected_array - # Ensure all docket types cases are distributed, including the 5 cavc evidence submission cases - expected_array2 = [Constants.AMA_DOCKETS.hearing, Constants.AMA_DOCKETS.hearing, "legacy", "legacy"] - expect(distributed_cases.map(&:docket)).to match_array expected_array2 - expect(distributed_cases.map(&:priority).uniq).to match_array [true] - expect(distributed_cases.map(&:genpop).uniq).to match_array [false] - end - end - context "using By Docket Date Distribution module" do before do FeatureToggle.enable!(:acd_distribute_by_docket_date) @@ -253,72 +236,6 @@ def to_judge_hash(arr) FeatureToggle.disable!(:acd_distribute_by_docket_date) FeatureToggle.disable!(:acd_exclude_from_affinity) end - context "without using Docket Levers" do - before do - create(:case_distribution_lever, :disable_legacy_priority, value: "false") - end - - xit "should only distribute the ready priority cases tied to a judge" do - expect(subject.count).to eq eligible_judges.count - expect(subject.map { |dist| dist.statistics["batch_size"] }).to match_array [2, 2, 0, 0] - - # Ensure we only distributed the 2 ready legacy and hearing priority cases that are tied to a judge - distributed_cases = DistributedCase.where(distribution: subject) - expect(distributed_cases.count).to eq 4 - expected_array = [ready_priority_bfkey, ready_priority_bfkey2, ready_priority_uuid, ready_priority_uuid2] - expect(distributed_cases.map(&:case_id)).to match_array expected_array - # Ensure all docket types cases are distributed, including the 5 cavc evidence submission cases - expected_array2 = %w[hearing hearing legacy legacy] - expect(distributed_cases.map(&:docket)).to match_array expected_array2 - expect(distributed_cases.map(&:priority).uniq).to match_array [true] - expect(distributed_cases.map(&:genpop).uniq).to match_array [false, true] - end - end - - context "using Excluding Appeals by Docket Type and Priority from Automatic Case Distribution levers" do - context "all Exluding levers turned to Include" do - before do - create(:case_distribution_lever, :disable_legacy_priority, value: "false") - create(:case_distribution_lever, :disable_ama_priority_hearing, value: "false") - create(:case_distribution_lever, :disable_ama_priority_direct_review, value: "false") - create(:case_distribution_lever, :disable_ama_priority_evidence_submission, value: "false") - end - - xit "should distribute the ready priority cases" do - expect(subject.count).to eq eligible_judges.count - expect(subject.map { |dist| dist.statistics["batch_size"] }).to match_array [2, 2, 0, 0] - - distributed_cases = DistributedCase.where(distribution: subject) - expect(distributed_cases.count).to eq 4 - expected_array = [ready_priority_bfkey, ready_priority_bfkey2, ready_priority_uuid, ready_priority_uuid2] - 
expect(distributed_cases.map(&:case_id)).to match_array expected_array - # Ensure all docket types cases are distributed, including the 5 cavc evidence submission cases - expected_array2 = %w[hearing hearing legacy legacy] - expect(distributed_cases.map(&:docket)).to match_array expected_array2 - expect(distributed_cases.map(&:priority).uniq).to match_array [true] - expect(distributed_cases.map(&:genpop).uniq).to match_array [false, true] - end - end - - context "all Exluding levers turned to Exclude" do - before do - create(:case_distribution_lever, :disable_legacy_priority, value: "true") - create(:case_distribution_lever, :disable_ama_priority_hearing, value: "true") - create(:case_distribution_lever, :disable_ama_priority_direct_review, value: "true") - create(:case_distribution_lever, :disable_ama_priority_evidence_submission, value: "true") - end - - xit "should not distribute the ready priority cases" do - expect(subject.count).to eq eligible_judges.count - expect(subject.map { |dist| dist.statistics["batch_size"] }).to match_array [0, 0, 0, 0] - - distributed_cases = DistributedCase.where(distribution: subject) - expect(distributed_cases.count).to eq 0 - expected_array = [] - expect(distributed_cases).to match_array expected_array - end - end - end end end @@ -627,7 +544,6 @@ def to_judge_hash(arr) it "using By Docket Date Distribution module" do FeatureToggle.enable!(:acd_distribute_by_docket_date) - today = Time.zone.now.to_date legacy_days_waiting = (today - legacy_priority_case.bfd19.to_date).to_i direct_review_days_waiting = (today - ready_priority_direct_case.receipt_date).to_i diff --git a/spec/jobs/quarterly_notifications_job_spec.rb b/spec/jobs/quarterly_notifications_job_spec.rb index f7ba2c0a12b..55a368e53c7 100644 --- a/spec/jobs/quarterly_notifications_job_spec.rb +++ b/spec/jobs/quarterly_notifications_job_spec.rb @@ -2,6 +2,7 @@ describe QuarterlyNotificationsJob, type: :job do include ActiveJob::TestHelper + let(:appeal) { create(:appeal, :active) } let(:legacy_appeal) { create(:legacy_appeal, vacols_case: vacols_case) } let(:vacols_case) { create(:case) } @@ -48,6 +49,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("docketed") subject end @@ -68,6 +70,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("docketed") subject end @@ -89,6 +92,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("Privacy Act Pending") subject end @@ -109,6 +113,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("docketed") subject end @@ -123,13 +128,14 @@ created_by_id: user.id, updated_by_id: user.id, appeal_docketed: true, - hearing_withdrawn: true, - scheduled_in_error: true + scheduled_in_error: true, + privacy_act_pending: true ) end it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("Privacy Act Pending") subject end @@ -150,26 +156,7 @@ it "pushes a new message" do expect_message_to_be_queued - - subject - end - end - - context "Hearing Scheduled / Privacy Act Pending with ihp task" do - let(:hearing) { create(:hearing, :with_tasks) } - let!(:appeal_state) do - hearing.appeal.appeal_state.tap do - _1.update!( - appeal_docketed: true, - hearing_scheduled: true, - privacy_act_pending: true, - vso_ihp_pending: true - ) - end - end - - it "pushes a new message" do - expect_message_to_be_queued + expect_message_to_have_status("docketed") subject end @@ -191,25 +178,7 @@ it 
"pushes a new message" do expect_message_to_be_queued - - subject - end - end - - context "Hearing Scheduled with ihp task pending" do - let(:hearing) { create(:hearing, :with_tasks) } - let!(:appeal_state) do - hearing.appeal.appeal_state.tap do - _1.update!( - appeal_docketed: true, - hearing_scheduled: true, - vso_ihp_pending: true - ) - end - end - - it "pushes a new message" do - expect_message_to_be_queued + expect_message_to_have_status("VSO IHP Pending / Privacy Act Pending") subject end @@ -229,6 +198,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("Hearing Scheduled / Privacy Act Pending") subject end @@ -249,6 +219,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("Privacy Act Pending") subject end @@ -269,6 +240,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("VSO IHP Pending") subject end @@ -286,6 +258,7 @@ it "pushes a new message" do expect_message_to_be_queued + expect_message_to_have_status("Hearing Scheduled") subject end @@ -339,6 +312,17 @@ def expect_message_to_be_queued ) end + def expect_message_to_have_status(status) + expect_any_instance_of(NotificationInitializationJob) + .to receive(:initialize) + .with({ + appeal_id: appeal_state.appeal_id, + appeal_type: appeal_state.appeal_type, + template_name: Constants.EVENT_TYPE_FILTERS.quarterly_notification, + appeal_status: status + }) + end + def expect_message_to_not_be_enqueued expect_any_instance_of(QuarterlyNotificationsJob) .to_not receive(:enqueue_init_jobs) diff --git a/spec/jobs/return_legacy_appeals_to_board_job_spec.rb b/spec/jobs/return_legacy_appeals_to_board_job_spec.rb new file mode 100644 index 00000000000..7c82e696fe9 --- /dev/null +++ b/spec/jobs/return_legacy_appeals_to_board_job_spec.rb @@ -0,0 +1,620 @@ +# frozen_string_literal: true + +describe ReturnLegacyAppealsToBoardJob, :all_dbs do + describe "#perform" do + let(:job) { described_class.new } + let(:returned_appeal_job) { instance_double("ReturnedAppealJob", id: 1) } + let(:appeals) { [{ "bfkey" => "1", "priority" => 1 }, { "bfkey" => "2", "priority" => 0 }] } + let(:moved_appeals) { [{ "bfkey" => "1", "priority" => 1 }] } + + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + + allow(job).to receive(:create_returned_appeal_job).and_return(returned_appeal_job) + allow(returned_appeal_job).to receive(:update!) 
+ allow(job).to receive(:eligible_and_moved_appeals).and_return([appeals, moved_appeals]) + allow(job).to receive(:filter_appeals).and_return({}) + allow(job).to receive(:send_job_slack_report) + allow(job).to receive(:complete_returned_appeal_job) + allow(job).to receive(:metrics_service_report_runtime) + end + + context "when the job completes successfully" do + it "creates a ReturnedAppealJob instance, processes appeals, and sends a report" do + allow(job).to receive(:slack_report).and_return(["Job completed successfully"]) + + job.perform + + expect(job).to have_received(:create_returned_appeal_job).once + expect(job).to have_received(:eligible_and_moved_appeals).once + expect(job).to have_received(:complete_returned_appeal_job) + .with(returned_appeal_job, "Job completed successfully", moved_appeals).once + expect(job).to have_received(:send_job_slack_report).with(["Job completed successfully"]).once + expect(job).to have_received(:metrics_service_report_runtime) + .with(metric_group_name: "return_legacy_appeals_to_board_job").once + end + end + + context "when no appeals are moved" do + before do + allow(job).to receive(:eligible_and_moved_appeals).and_return([appeals, nil]) + allow(job).to receive(:complete_returned_appeal_job) + allow(job).to receive(:send_job_slack_report) + end + + it "sends a no records moved Slack report and completes the job" do + job.perform + + # expect(job).to have_received(:send_job_slack_report).with(described_class::NO_RECORDS_FOUND_MESSAGE).once + expect(job).to have_received(:complete_returned_appeal_job) + .with(returned_appeal_job, Constants.DISTRIBUTION.no_records_moved_message, []).once + expect(job).to have_received(:send_job_slack_report).with(described_class::NO_RECORDS_FOUND_MESSAGE).once + expect(job).to have_received(:metrics_service_report_runtime).once + end + end + + context "when an error occurs" do + let(:error_message) { "Unexpected error" } + let(:slack_service_instance) { instance_double(SlackService) } + + before do + allow(job).to receive(:eligible_and_moved_appeals).and_raise(StandardError, error_message) + allow(job).to receive(:log_error) + allow(returned_appeal_job).to receive(:update!) + allow(SlackService).to receive(:new).and_return(slack_service_instance) + allow(slack_service_instance).to receive(:send_notification) + end + + it "handles the error, logs it, and sends a Slack notification" do + job.perform + + expect(job).to have_received(:log_error).with(instance_of(StandardError)) + expect(returned_appeal_job).to have_received(:update!) 
+ .with(hash_including(errored_at: kind_of(Time), + stats: "{\"message\":\"Job failed with error: #{error_message}\"}")).once + expect(slack_service_instance).to have_received(:send_notification).with( + a_string_matching(/\n \[ERROR\]/), job.class.name + ).once + expect(job).to have_received(:metrics_service_report_runtime).once + end + end + end + + describe "#non_ssc_avljs" do + let(:job) { described_class.new } + + context "2 non ssc avljs exist" do + let!(:non_ssc_avlj_user_1) { create(:user, :non_ssc_avlj_user) } + let!(:non_ssc_avlj_user_2) { create(:user, :non_ssc_avlj_user) } + let!(:ssc_avlj_user) { create(:user, :ssc_avlj_user) } + + it "returns both non ssc avljs" do + expect(job.send(:non_ssc_avljs)).to eq([non_ssc_avlj_user_1.vacols_staff, non_ssc_avlj_user_2.vacols_staff]) + end + end + + context "1 each of non ssc avlj, ssc avlj, regular vlj, inactive non ssc avlj exist" do + let!(:non_ssc_avlj_user) { create(:user, :non_ssc_avlj_user) } + let!(:inactive_non_ssc_avlj_user) { create(:user, :inactive, :non_ssc_avlj_user) } + let!(:ssc_avlj_user) { create(:user, :ssc_avlj_user) } + let!(:user) { create(:user, :with_vacols_record) } + + before do + inactive_non_ssc_avlj_user.vacols_staff.update!(sactive: "I") + end + + it "returns only the non ssc avlj" do + expect(job.send(:non_ssc_avljs)).to eq([non_ssc_avlj_user.vacols_staff]) + end + end + + context "no non ssc avljs exist" do + let!(:ssc_avlj_user) { create(:user, :ssc_avlj_user) } + + it "returns an empty array" do + expect(job.send(:non_ssc_avljs)).to eq([]) + end + end + end + + describe "#calculate_remaining_appeals" do + let(:job) { described_class.new } + let(:p1) { { "bfkey" => "1", "priority" => 1 } } + let(:p2) { { "bfkey" => "2", "priority" => 1 } } + let(:np1) { { "bfkey" => "3", "priority" => 0 } } + let(:np2) { { "bfkey" => "4", "priority" => 0 } } + + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + end + + context "2 priority and 2 non-priority legacy appeals tied to non-ssc avljs exist" do + let(:appeals) { [p1, p2, np1, np2] } + let(:p_appeals_moved) { [p1] } + let(:np_appeals_moved) { [np1] } + + it "should return the unmoved legacy appeals" do + returned_reamining_appeals = job.send(:calculate_remaining_appeals, appeals, p_appeals_moved, np_appeals_moved) + expect(returned_reamining_appeals).to eq([[p2], [np2]]) + end + end + + context "2 priority legacy appeals tied to non-ssc avljs exist" do + let(:appeals) { [p1, p2] } + let(:p_appeals_moved) { [p1] } + let(:np_appeals_moved) { [] } + + it "should return the unmoved legacy priority appeal and an empty array of non-priority appeals" do + returned_reamining_appeals = job.send(:calculate_remaining_appeals, appeals, p_appeals_moved, np_appeals_moved) + expect(returned_reamining_appeals).to eq([[p2], []]) + end + end + + context "2 non-priority legacy appeals tied to non-ssc avljsexist" do + let(:appeals) { [np1, np2] } + let(:p_appeals_moved) { [] } + let(:np_appeals_moved) { [np1] } + + it "should return the unmoved legacy non-priority appeal and an empty array of priority appeals" do + returned_reamining_appeals = job.send(:calculate_remaining_appeals, appeals, p_appeals_moved, np_appeals_moved) + expect(returned_reamining_appeals).to eq([[], [np2]]) + end + end + + context "all appeals are moved" do + let(:appeals) { [p1, p2, np1, np2] } + let(:p_appeals_moved) { [p1, p2] } + let(:np_appeals_moved) { [np1, np2] } + + it "should return 2 empty arrays" do + returned_reamining_appeals = 
job.send(:calculate_remaining_appeals, appeals, p_appeals_moved, np_appeals_moved) + expect(returned_reamining_appeals).to eq([[], []]) + end + end + + context "no legacy appeals tied to non-ssc avljs exist" do + let(:appeals) { [] } + let(:p_appeals_moved) { [] } + let(:np_appeals_moved) { [] } + + it "returns an empty array" do + returned_reamining_appeals = job.send(:calculate_remaining_appeals, appeals, p_appeals_moved, np_appeals_moved) + expect(returned_reamining_appeals).to_not eq([]) + end + end + end + + describe "#filter_appeals" do + let(:job) { described_class.new } + let(:non_ssc_avlj1) { create_non_ssc_avlj("NONSSCAN1", "NonScc User1") } + let(:non_ssc_avlj2) { create_non_ssc_avlj("NONSSCAN2", "NonScc User2") } + let(:non_ssc_avlj1_sattyid) { non_ssc_avlj1.vacols_staff.sattyid } + let(:non_ssc_avlj2_sattyid) { non_ssc_avlj2.vacols_staff.sattyid } + + let(:p1) { { "bfkey" => "1", "priority" => 1, "vlj" => non_ssc_avlj1_sattyid } } + let(:p2) { { "bfkey" => "2", "priority" => 1, "vlj" => non_ssc_avlj2_sattyid } } + let(:np1) { { "bfkey" => "3", "priority" => 0, "vlj" => non_ssc_avlj2_sattyid } } + let(:np2) { { "bfkey" => "4", "priority" => 0, "vlj" => non_ssc_avlj1_sattyid } } + let(:appeals) { [p1, p2, np1, np2] } + + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + end + + context "a single appeal from each of 2 non ssc avljs gets moved" do + let(:moved_appeals) { [p1, np1] } + + it "returns hash object with correct attributes that match the expected values" do + returned_filtered_appeals_info = job.send(:filter_appeals, appeals, moved_appeals) + expected_returned_object = { + priority_appeals_count: 1, + non_priority_appeals_count: 1, + remaining_priority_appeals_count: 1, + remaining_non_priority_appeals_count: 1, + grouped_by_avlj: [non_ssc_avlj1.vacols_staff.sattyid, non_ssc_avlj2.vacols_staff.sattyid] + } + expect(returned_filtered_appeals_info).to eq(expected_returned_object) + end + end + + context "all appeals from each of 2 non ssc avljs gets moved" do + let(:moved_appeals) { [p1, p2, np1, np2] } + + it "returns hash object with correct attributes that match the expected values" do + returned_filtered_appeals_info = job.send(:filter_appeals, appeals, moved_appeals) + expected_returned_object = { + priority_appeals_count: 2, + non_priority_appeals_count: 2, + remaining_priority_appeals_count: 0, + remaining_non_priority_appeals_count: 0, + grouped_by_avlj: [non_ssc_avlj1.vacols_staff.sattyid, non_ssc_avlj2.vacols_staff.sattyid] + } + expect(returned_filtered_appeals_info).to eq(expected_returned_object) + end + end + + context "no appeals are moved" do + let(:moved_appeals) { [] } + + it "returns hash object with correct attributes that match the expected values" do + returned_filtered_appeals_info = job.send(:filter_appeals, appeals, moved_appeals) + expected_returned_object = { + priority_appeals_count: 0, + non_priority_appeals_count: 0, + remaining_priority_appeals_count: 2, + remaining_non_priority_appeals_count: 2, + grouped_by_avlj: [] + } + expect(returned_filtered_appeals_info).to eq(expected_returned_object) + end + end + + context "no appeals exist" do + let(:moved_appeals) { [] } + let(:appeals) { [] } + + it "returns hash object with correct attributes that match the expected values" do + returned_filtered_appeals_info = job.send(:filter_appeals, appeals, moved_appeals) + expected_returned_object = { + priority_appeals_count: 0, + non_priority_appeals_count: 0, + remaining_priority_appeals_count: 
0, + remaining_non_priority_appeals_count: 0, + grouped_by_avlj: [] + } + expect(returned_filtered_appeals_info).to eq(expected_returned_object) + end + end + + context "an extra priority appeal is moved that wasn't in the original list of appeals" do + let(:extra_priority_appeal) { { "bfkey" => "5", "priority" => 1, "vlj" => non_ssc_avlj1_sattyid } } + let(:moved_appeals) { [p1, np1, extra_priority_appeal] } + + it "raises an ERROR" do + expected_msg = "An invalid priority appeal was detected in the list of moved appeals: "\ + "#{[extra_priority_appeal]}" + + expect { job.send(:filter_appeals, appeals, moved_appeals) }.to raise_error(StandardError, expected_msg) + end + end + + context "an extra non-priority appeal is moved that wasn't in the original list of appeals" do + let(:extra_non_priority_appeal) { { "bfkey" => "5", "priority" => 0, "vlj" => non_ssc_avlj1_sattyid } } + let(:moved_appeals) { [p1, np1, extra_non_priority_appeal] } + + it "raises an ERROR" do + expected_msg = "An invalid non-priority appeal was detected in the list of moved appeals: "\ + "#{[extra_non_priority_appeal]}" + + expect { job.send(:filter_appeals, appeals, moved_appeals) }.to raise_error(StandardError, expected_msg) + end + end + end + + describe "#create_returned_appeal_job" do + let(:job) { described_class.new } + + context "when called" do + it "creates a valid ReturnedAppealJob" do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + returned_appeal_job = job.send(:create_returned_appeal_job) + expect(returned_appeal_job.started_at).to be_within(1.second).of(Time.zone.now) + expect(returned_appeal_job.stats).to eq({ message: "Job started" }.to_json) + end + end + end + + describe "#send_job_slack_report" do + let(:job) { described_class.new } + let(:slack_service_instance) { instance_double(SlackService) } + + before do + allow(SlackService).to receive(:new).and_return(slack_service_instance) + allow(slack_service_instance).to receive(:send_notification) + end + + context "is passed a valid message array" do + let(:message) do + [ + "Job performed successfully", + "Total Priority Appeals Moved: 5", + "Total Non-Priority Appeals Moved: 3", + "Total Remaining Priority Appeals: 10", + "Total Remaining Non-Priority Appeals: 7", + "SATTYIDs of Non-SSC AVLJs Moved: AVJL1, AVJL" + ] + end + + it "sends the message successfully" do + expected_report = "Job performed successfully\n"\ + "Total Priority Appeals Moved: 5\n"\ + "Total Non-Priority Appeals Moved: 3\n"\ + "Total Remaining Priority Appeals: 10\n"\ + "Total Remaining Non-Priority Appeals: 7\n"\ + "SATTYIDs of Non-SSC AVLJs Moved: AVJL1, AVJL" + + job.send(:send_job_slack_report, message) + expect(slack_service_instance) + .to have_received(:send_notification) + .with(expected_report, "ReturnLegacyAppealsToBoardJob") + end + end + + context "is passed an empty array" do + let(:message) { [] } + it "sends a notification to Slack with the correct message" do + expected_msg = "Slack message cannot be empty or nil" + + expect { job.send(:send_job_slack_report, message) }.to raise_error(StandardError, expected_msg) + end + end + end + + describe "#move_qualifying_appeals" do + let(:job) { described_class.new } + let(:non_ssc_avlj1) { create_non_ssc_avlj("NONSSCAN1", "NonScc User1") } + let(:non_ssc_avlj2) { create_non_ssc_avlj("NONSSCAN2", "NonScc User2") } + let(:non_ssc_avlj1_sattyid) { non_ssc_avlj1.vacols_staff.sattyid } + let(:non_ssc_avlj2_sattyid) { non_ssc_avlj2.vacols_staff.sattyid } + + let(:s1_p_appeal1) 
{ { "bfkey" => "1", "priority" => 1, "vlj" => non_ssc_avlj1_sattyid, "bfd19" => 2.days.ago } } + let(:s1_p_appeal2) { { "bfkey" => "2", "priority" => 1, "vlj" => non_ssc_avlj1_sattyid, "bfd19" => 2.days.ago } } + let(:s1_np_appeal1) { { "bfkey" => "3", "priority" => 0, "vlj" => non_ssc_avlj1_sattyid, "bfd19" => 10.days.ago } } + let(:s1_np_appeal2) { { "bfkey" => "4", "priority" => 0, "vlj" => non_ssc_avlj1_sattyid, "bfd19" => 10.days.ago } } + + let(:s2_p_appeal1) { { "bfkey" => "5", "priority" => 1, "vlj" => non_ssc_avlj2_sattyid, "bfd19" => 2.days.ago } } + let(:s2_p_appeal2) { { "bfkey" => "6", "priority" => 1, "vlj" => non_ssc_avlj2_sattyid, "bfd19" => 2.days.ago } } + let(:s2_np_appeal1) { { "bfkey" => "7", "priority" => 0, "vlj" => non_ssc_avlj2_sattyid, "bfd19" => 10.days.ago } } + let(:s2_np_appeal2) { { "bfkey" => "8", "priority" => 0, "vlj" => non_ssc_avlj2_sattyid, "bfd19" => 10.days.ago } } + + let(:staff1_p_appeals) { [s1_p_appeal1, s1_p_appeal2] } + let(:staff1_np_appeals) { [s1_np_appeal1, s1_np_appeal2] } + let(:staff2_p_appeals) { [s2_p_appeal1, s2_p_appeal2] } + let(:staff2_np_appeals) { [s2_np_appeal1, s2_np_appeal2] } + let(:appeals) do + [ + s1_p_appeal1, + s1_p_appeal2, + s1_np_appeal1, + s1_np_appeal2, + s2_p_appeal1, + s2_p_appeal2, + s2_np_appeal1, + s2_np_appeal2 + ] + end + + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + allow(VACOLS::Case).to receive(:batch_update_vacols_location) + end + + context "limit is set to 2 per non ssc avlj" do + it "moves the 2 priority appeals per non ssc avlj" do + expected_moved_appeals = [s1_p_appeal1, s1_p_appeal2, s2_p_appeal1, s2_p_appeal2] + expected_moved_appeal_bf_keys = expected_moved_appeals.map { |m_appeal| m_appeal["bfkey"] } + + moved_appeals = job.send(:move_qualifying_appeals, appeals) + + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to have_received(:batch_update_vacols_location) + .with("63", match_array(expected_moved_appeal_bf_keys)) + end + end + + context "limit is set to 1 per non ssc avlj" do + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(1) + end + + it "moves the oldest priority appeals per non ssc avlj" do + s1_p_appeal1.update("bfd19" => 15.days.ago) + s1_p_appeal2.update("bfd19" => 20.days.ago) + s2_p_appeal1.update("bfd19" => 80.days.ago) + s2_p_appeal2.update("bfd19" => 40.days.ago) + + expected_moved_appeals = [s1_p_appeal2, s2_p_appeal1] + expected_moved_appeal_bf_keys = expected_moved_appeals.map { |m_appeal| m_appeal["bfkey"] } + + moved_appeals = job.send(:move_qualifying_appeals, appeals) + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to have_received(:batch_update_vacols_location) + .with("63", match_array(expected_moved_appeal_bf_keys)) + end + end + + context "limit is set to 10 per non ssc avlj" do + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(10) + end + + it "moves all appeals" do + expected_moved_appeals = appeals + expected_moved_appeal_bf_keys = expected_moved_appeals.map { |m_appeal| m_appeal["bfkey"] } + + moved_appeals = job.send(:move_qualifying_appeals, appeals) + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to have_received(:batch_update_vacols_location) + .with("63", match_array(expected_moved_appeal_bf_keys)) + end + end + + context "there are no non_ssc_avljs" do + before do + 
allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(10) + allow(job).to receive(:non_ssc_avljs).and_return([]) + end + + it "returns an empty array and VACOLS::Case.batch_update_vacols_location does not run" do + expected_moved_appeals = [] + + moved_appeals = job.send(:move_qualifying_appeals, appeals) + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to_not have_received(:batch_update_vacols_location) + end + end + + context "there are no appeals" do + it "returns an empty array and VACOLS::Case.batch_update_vacols_location does not run" do + expected_moved_appeals = [] + + moved_appeals = job.send(:move_qualifying_appeals, []) + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to_not have_received(:batch_update_vacols_location) + end + end + + context "the lever is set to 0" do + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(0) + end + + it "returns an empty array and VACOLS::Case.batch_update_vacols_location does not run" do + expected_moved_appeals = [] + + moved_appeals = job.send(:move_qualifying_appeals, appeals) + expect(moved_appeals).to match_array(expected_moved_appeals) + expect(VACOLS::Case).to_not have_received(:batch_update_vacols_location) + end + end + + context "the lever is set to a value below 0" do + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(-1) + end + + it "raises an ERROR and VACOLS::Case.batch_update_vacols_location does not run" do + expected_msg = "CaseDistributionLever.nonsscavlj_number_of_appeals_to_move set below 0" + + expect { job.send(:move_qualifying_appeals, appeals) }.to raise_error(StandardError, expected_msg) + expect(VACOLS::Case).to_not have_received(:batch_update_vacols_location) + end + end + end + + describe "#get_tied_appeal_bfkeys" do + let(:job) { described_class.new } + let(:appeal_1) { { "priority" => 0, "bfd19" => 10.days.ago, "bfkey" => "1" } } + let(:appeal_2) { { "priority" => 1, "bfd19" => 8.days.ago, "bfkey" => "2" } } + let(:appeal_3) { { "priority" => 0, "bfd19" => 6.days.ago, "bfkey" => "3" } } + let(:appeal_4) { { "priority" => 1, "bfd19" => 4.days.ago, "bfkey" => "4" } } + + context "with a mix of priority and non-priority appeals" do + let(:tied_appeals) { [appeal_1, appeal_2, appeal_3, appeal_4] } + + it "returns the keys sorted by priority and then bfd19" do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move).and_return(2) + result = job.send(:get_tied_appeal_bfkeys, tied_appeals) + expect(result).to eq(%w[2 4 1 3]) + end + end + end + + describe "#update_qualifying_appeals_bfkeys" do + let(:job) { described_class.new } + let(:nonsscavlj_number_of_appeals_to_move_count) { 2 } + + before do + allow(CaseDistributionLever).to receive(:nonsscavlj_number_of_appeals_to_move) + .and_return(nonsscavlj_number_of_appeals_to_move_count) + end + + context "maximum moved appeals per non ssc avlj is 2 and a starting bfkey list of 2 and a tied list of 4 keys" do + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + + it "adds 2 keys to qualifying bfkey list" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[1 2 3 4]) + end + end + + context "maximum moved appeals per non ssc avlj is 4 and a starting
bfkey list of 2 and a tied list of 4 keys" do + let(:nonsscavlj_number_of_appeals_to_move_count) { 4 } + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + + it "adds all tied keys to qualifying bfkey list" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[1 2 3 4 5 6]) + end + end + + context "maximum moved appeals per non ssc avlj is higher than the length of the tied list and a starting bfkey "\ + "list of 2 and a tied list of 4 keys" do + let(:nonsscavlj_number_of_appeals_to_move_count) { 10 } + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + + it "adds all tied keys to qualifying bfkey list" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[1 2 3 4 5 6]) + end + end + + context "maximum moved appeals per non ssc avlj is 2 and starting bfkey list is empty and a tied list of 4 keys" do + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { [] } + + it "adds 2 tied keys to qualifying bfkey list" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[3 4]) + end + end + + context "maximum moved appeals per non ssc avlj is 2 and starting bfkey list of 2 keys and a tied list is empty" do + let(:tied_appeals_bfkeys) { [] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + + it "adds no tied keys to qualifying bfkey list" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[1 2]) + end + end + + context "maximum moved appeals per non ssc avlj is 2 and a starting bfkey list is empty and a tied list is empty" do + let(:tied_appeals_bfkeys) { [] } + let(:qualifying_appeals_bfkeys) { [] } + + it "adds no tied keys to qualifying bfkey list and list is empty" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq([]) + end + end + + context "lever is set to 0" do + let(:nonsscavlj_number_of_appeals_to_move_count) { 0 } + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + + it "returns an unchanged array" do + appeals = job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) + + expect(appeals).to eq(%w[1 2]) + end + end + + context "lever is set to below 0" do + let(:nonsscavlj_number_of_appeals_to_move_count) { -1 } + let(:tied_appeals_bfkeys) { %w[3 4 5 6] } + let(:qualifying_appeals_bfkeys) { %w[1 2] } + let(:message) { "CaseDistributionLever.nonsscavlj_number_of_appeals_to_move set below 0" } + + it "raises an error saying the lever has been set incorrectly" do + expect { job.send(:update_qualifying_appeals_bfkeys, tied_appeals_bfkeys, qualifying_appeals_bfkeys) } + .to raise_error(StandardError, message) + end + end + end + + def create_non_ssc_avlj(css_id, full_name) + User.find_by_css_id(css_id) || + create(:user, :non_ssc_avlj_user, css_id: css_id, full_name: full_name) + end +end diff --git a/spec/jobs/send_notification_job_spec.rb b/spec/jobs/send_notification_job_spec.rb index 32ebb4b8296..cde49138139 100644 --- a/spec/jobs/send_notification_job_spec.rb +++ b/spec/jobs/send_notification_job_spec.rb @@ -178,21 +178,11 @@ context "#queue_name_suffix" do subject { described_class.queue_name_suffix } - it 
"returns non-FIFO name in development environment" do - is_expected.to eq :send_notifications - end - - it "returns FIFO name in non-development environment" do - allow(ApplicationController).to receive(:dependencies_faked?).and_return(false) - + it "returns FIFO name" do is_expected.to eq :"send_notifications.fifo" end end - it "it is the correct queue" do - expect(SendNotificationJob.new.queue_name).to eq(queue_name) - end - context ".perform" do subject(:job) { SendNotificationJob.perform_later(good_message.to_json) } diff --git a/spec/jobs/sync_reviews_job_spec.rb b/spec/jobs/sync_reviews_job_spec.rb index a285d531439..369c93b2e8d 100644 --- a/spec/jobs/sync_reviews_job_spec.rb +++ b/spec/jobs/sync_reviews_job_spec.rb @@ -143,7 +143,7 @@ SyncReviewsJob.perform_now end.to have_enqueued_job( ProcessDecisionDocumentJob - ).with(decision_document_needs_reprocessing.id).exactly(:once) + ).with(decision_document_needs_reprocessing.id, false).exactly(:once) end end end diff --git a/spec/jobs/va_notify_status_update_job_spec.rb b/spec/jobs/va_notify_status_update_job_spec.rb deleted file mode 100644 index 4c6f2c65842..00000000000 --- a/spec/jobs/va_notify_status_update_job_spec.rb +++ /dev/null @@ -1,271 +0,0 @@ -# frozen_string_literal: true - -describe VANotifyStatusUpdateJob, type: :job do - include ActiveJob::TestHelper - let(:current_user) { create(:user, roles: ["System Admin"]) } - let(:notifications_email_only) do - FactoryBot.create_list :notification_email_only, 10 - end - let(:notifications_sms_only) do - FactoryBot.create_list :notification_sms_only, 10 - end - let(:notifications_email_and_sms) do - FactoryBot.create_list :notification_email_and_sms, 10 - end - let(:email_only) do - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email", - email_notification_status: "Success") - end - let(:sms_only) do - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "SMS", - sms_notification_status: "Success") - end - let(:email_and_sms) do - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email and SMS", - email_notification_status: "Success", - sms_notification_status: "Success") - end - let(:notification_collection) do - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email", - email_notification_external_id: "0", - sms_notification_external_id: nil, - email_notification_status: "Success", - created_at: Time.zone.now) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "SMS", - email_notification_external_id: nil, - sms_notification_external_id: "0", - sms_notification_status: "temporary-failure", - created_at: Time.zone.now) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: 
Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "SMS", - email_notification_external_id: nil, - sms_notification_external_id: "1", - sms_notification_status: "created", - created_at: Time.zone.now) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email", - email_notification_external_id: "1", - sms_notification_external_id: nil, - email_notification_status: "technical-failure", - created_at: Time.zone.now) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email and SMS", - email_notification_external_id: "2", - sms_notification_external_id: "2", - email_notification_status: "temporary-failure", - sms_notification_status: "temporary-failure", - created_at: Time.zone.now - 5.days) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1576", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email and SMS", - email_notification_external_id: "3", - sms_notification_external_id: "3", - email_notification_status: "delivered", - sms_notification_status: "delivered", - created_at: Time.zone.now - 5.days) - create(:notification, - appeals_id: "5d70058f-8641-4155-bae8-5af4b61b1577", - appeals_type: "Appeal", - event_type: Constants.EVENT_TYPE_FILTERS.hearing_scheduled, - event_date: Time.zone.today, - notification_type: "Email and SMS", - email_notification_external_id: "4", - sms_notification_external_id: "4", - email_notification_status: "delivered", - sms_notification_status: "delivered", - created_at: Time.zone.now - 5.days) - end - - let(:collect) { Notification.where(id: [1, 2, 3, 4, 5]) } - - let(:queue_name) { "caseflow_test_low_priority" } - - before do - Seeds::NotificationEvents.new.seed! 
- end - - after(:each) do - clear_enqueued_jobs - clear_performed_jobs - end - - it "it is the correct queue" do - expect(VANotifyStatusUpdateJob.new.queue_name).to eq(queue_name) - end - - context ".perform" do - subject(:job) { VANotifyStatusUpdateJob.perform_later } - describe "send message to queue" do - it "has one message in queue" do - expect { job }.to change(ActiveJob::Base.queue_adapter.enqueued_jobs, :size).by(1) - end - - it "processes message" do - perform_enqueued_jobs do - result = VANotifyStatusUpdateJob.perform_later - expect(result.arguments[0]).to eq(nil) - end - end - - it "sends to VA Notify when no errors are present" do - expect(Rails.logger).not_to receive(:error) - expect { VANotifyStatusUpdateJob.perform_now.to receive(:send_to_va_notify) } - end - - it "defaults to 650 for the query limit if environment variable not found or invalid" do - stub_const("VANotifyStatusUpdateJob::QUERY_LIMIT", nil) - expect(Rails.logger).to receive(:info) - .with("VANotifyStatusJob can not read the VA_NOTIFY_STATUS_UPDATE_BATCH_LIMIT environment variable.\ - Defaulting to 650.") - VANotifyStatusUpdateJob.perform_now - end - - it "logs out an error to Raven when email type that is not Email or SMS is found" do - external_id = SecureRandom.uuid - email_only.update!(email_notification_external_id: external_id) - job_instance = VANotifyStatusUpdateJob.new - external_id = SecureRandom.uuid - result = job_instance.send(:get_current_status, external_id, "None") - expect(result).to eq(false) - end - end - - describe "feature flags" do - describe "Email" do - it "updates the Notification when successful" do - email_only.email_notification_external_id = SecureRandom.uuid - allow(job).to receive(:notifications_not_processed).and_return([email_only]) - job.perform_now - expect(email_only.email_notification_status).to eq("created") - end - it "logs when external id is not present" do - allow(job).to receive(:notifications_not_processed).and_return([email_only]) - job.perform_now - expect(email_only.email_notification_status).to eq("No External Id") - end - end - - describe "SMS" do - it "updates the Notification when successful" do - sms_only.sms_notification_external_id = SecureRandom.uuid - allow(job).to receive(:notifications_not_processed).and_return([sms_only]) - job.perform_now - expect(sms_only.sms_notification_status).to eq("created") - end - it "logs when external id is not present" do - allow(job).to receive(:notifications_not_processed).and_return([sms_only]) - job.perform_now - expect(sms_only.sms_notification_status).to eq("No External Id") - end - end - - describe "Email and SMS" do - it "updates the Notification when successful" do - email_and_sms.sms_notification_external_id = SecureRandom.uuid - email_and_sms.email_notification_external_id = SecureRandom.uuid - allow(job).to receive(:notifications_not_processed).and_return([email_and_sms]) - job.perform_now - expect(email_and_sms.sms_notification_status && email_and_sms.email_notification_status).to eq("created") - end - it "logs when external id is not present" do - allow(job).to receive(:notifications_not_processed).and_return([email_and_sms]) - job.perform_now - expect(email_and_sms.sms_notification_status && - email_and_sms.email_notification_status).to eq("No External Id") - end - - it "updates the email and sms notification status if an external id is found" do - email_and_sms.update!(sms_notification_external_id: SecureRandom.uuid, - email_notification_external_id: SecureRandom.uuid) - job.perform_now - notification = 
Notification.first - expect(notification.email_notification_status && notification.sms_notification_status).to eq("created") - end - end - end - end - - context "#get_current_status" do - subject(:job) { VANotifyStatusUpdateJob.perform_later } - it "handles VA Notify errors" do - email_and_sms.sms_notification_external_id = SecureRandom.uuid - email_and_sms.email_notification_external_id = SecureRandom.uuid - allow(job).to receive(:notifications_not_processed).and_return([email_and_sms]) - allow(VANotifyService).to receive(:get_status).and_raise(Caseflow::Error::VANotifyNotFoundError) - expect(job).to receive(:log_error).with(/VA Notify API returned error/).twice - job.perform_now - end - end - - context "#notifications_not_processed" do - subject(:job) { VANotifyStatusUpdateJob.perform_later } - it "queries the notification table using activerecord" do - allow(job).to receive(:find_notifications_not_processed).and_return([]) - expect(job.send(:find_notifications_not_processed)) - job.perform_now - end - end - - context "#find_notif_not_processed" do - subject(:job) { VANotifyStatusUpdateJob.perform_later } - it "returns a collection of notifications from the DB that hold the qualifying statuses" do - notification_collection - expect(job.send(:find_notifications_not_processed)).not_to include(Notification.where(id: [6, 7])) - end - end - - context "#default_to_650" do - before do - VANotifyStatusUpdateJob::QUERY_LIMIT = nil - end - - subject(:job) { VANotifyStatusUpdateJob.perform_later } - it "defaults to 650" do - expect(Rails.logger).to receive(:info).with( - "VANotifyStatusJob can not read the VA_NOTIFY_STATUS_UPDATE_BATCH_LIMIT environment variable.\ - Defaulting to 650." - ) - job.perform - end - end -end diff --git a/spec/jobs/virtual_hearings/delete_conferences_job_spec.rb b/spec/jobs/virtual_hearings/delete_conferences_job_spec.rb index 527fda1e772..55bf82f74b1 100644 --- a/spec/jobs/virtual_hearings/delete_conferences_job_spec.rb +++ b/spec/jobs/virtual_hearings/delete_conferences_job_spec.rb @@ -188,8 +188,7 @@ expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.successful", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) subject @@ -213,8 +212,7 @@ expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.failed", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) subject @@ -241,8 +239,7 @@ expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.successful", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) subject @@ -270,8 +267,7 @@ expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.failed", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) subject @@ -295,16 +291,14 @@ expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.successful", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + 
metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) expect(MetricsService).to receive(:increment_counter).with( hash_including( metric_name: "deleted_conferences.failed", - metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME, - by: 2 + metric_group: Constants.DATADOG_METRICS.HEARINGS.VIRTUAL_HEARINGS_GROUP_NAME ) ) diff --git a/spec/models/appellant_notification_spec.rb b/spec/models/appellant_notification_spec.rb index 9b09924d134..4c97d740f4d 100644 --- a/spec/models/appellant_notification_spec.rb +++ b/spec/models/appellant_notification_spec.rb @@ -3,21 +3,22 @@ describe AppellantNotification do describe "class methods" do describe "self.handle_errors" do - let(:appeal) { create(:appeal, number_of_claimants: 1) } + let(:template_name) { "Quarterly Notification" } + let(:appeal) { create(:appeal, :active, number_of_claimants: 1) } let(:current_user) { User.system_user } context "if appeal is nil" do let(:empty_appeal) {} it "reports the error" do - expect { AppellantNotification.handle_errors(empty_appeal) }.to raise_error( + expect { AppellantNotification.handle_errors(empty_appeal, template_name) }.to raise_error( AppellantNotification::NoAppealError ) end end context "with no claimant listed" do - let(:appeal) { create(:appeal, number_of_claimants: 0) } + let(:appeal) { create(:appeal, :active, number_of_claimants: 0) } it "returns error message" do - expect(AppellantNotification.handle_errors(appeal)[:status]).to eq( + expect(AppellantNotification.handle_errors(appeal, template_name)[:status]).to eq( AppellantNotification::NoClaimantError.new(appeal.id).status ) end @@ -25,31 +26,42 @@ context "with no participant_id listed" do let(:claimant) { create(:claimant, participant_id: "") } - let(:appeal) { create(:appeal) } + let(:appeal) { create(:appeal, :active) } before do appeal.claimants = [claimant] end it "returns error message" do - expect(AppellantNotification.handle_errors(appeal)[:status]).to eq( + expect(AppellantNotification.handle_errors(appeal, template_name)[:status]).to eq( AppellantNotification::NoParticipantIdError.new(appeal.id).status ) end end + context "with an inactive appeal" do + let(:appeal) { create(:appeal, :active, number_of_claimants: 1) } + it "returns error message" do + appeal.root_task.completed! 
+ expect { AppellantNotification.handle_errors(appeal, template_name) }.to raise_error( + AppellantNotification::InactiveAppealError + ) + end + end + context "with no errors" do it "doesn't raise" do - expect(AppellantNotification.handle_errors(appeal)[:status]).to eq "Success" + expect(AppellantNotification.handle_errors(appeal, template_name)[:status]).to eq "Success" end end end describe "veteran is deceased" do - let(:appeal) { create(:appeal, number_of_claimants: 1) } + let(:appeal) { create(:appeal, :active, number_of_claimants: 1) } let(:substitute_appellant) { create(:appellant_substitution) } + let(:template_name) { "test" } it "with no substitute appellant" do appeal.veteran.update!(date_of_death: Time.zone.today) - expect(AppellantNotification.handle_errors(appeal)[:status]).to eq "Failure Due to Deceased" + expect(AppellantNotification.handle_errors(appeal, template_name)[:status]).to eq "Failure Due to Deceased" end it "with substitute appellant" do @@ -57,13 +69,13 @@ substitute_appellant.update!(source_appeal_id: appeal.id) substitute_appellant.send(:establish_substitution_on_same_appeal) appeal.update!(veteran_is_not_claimant: true) - expect(AppellantNotification.handle_errors(appeal)[:status]).to eq "Success" + expect(AppellantNotification.handle_errors(appeal, template_name)[:status]).to eq "Success" end end describe "self.create_payload" do - let(:good_appeal) { create(:appeal, number_of_claimants: 1) } - let(:bad_appeal) { create(:appeal) } + let(:good_appeal) { create(:appeal, :active, number_of_claimants: 1) } + let(:bad_appeal) { create(:appeal, :active) } let(:bad_claimant) { create(:claimant, participant_id: "") } let(:template_name) { "test" } @@ -148,14 +160,14 @@ it "Will notify appellant that the legacy appeal decision has been mailed (Non Contested)" do expect(AppellantNotification).to receive(:notify_appellant).with(legacy_appeal, non_contested) decision_document = dispatch.send dispatch_func, params - decision_document.process! + decision_document.process!(false) end it "Will notify appellant that the legacy appeal decision has been mailed (Contested)" do expect(AppellantNotification).to receive(:notify_appellant).with(legacy_appeal, contested) allow(legacy_appeal).to receive(:contested_claim).and_return(true) legacy_appeal.contested_claim decision_document = dispatch.send dispatch_func, params - decision_document.process! + decision_document.process!(true) end end @@ -205,7 +217,7 @@ it "Will notify appellant that the AMA appeal decision has been mailed (Non Contested)" do expect(AppellantNotification).to receive(:notify_appellant).with(appeal, non_contested) decision_document = dispatch.send dispatch_func, params - decision_document.process! + decision_document.process!(false) end it "Will notify appellant that the AMA appeal decision has been mailed (Contested)" do expect(AppellantNotification).to receive(:notify_appellant).with(contested_appeal, contested) @@ -213,7 +225,7 @@ contested_appeal.contested_claim? contested_decision_document = contested_dispatch .send dispatch_func, contested_params - contested_decision_document.process! 
+ contested_decision_document.process!(true) end end end @@ -570,7 +582,7 @@ # Note: only privacyactrequestmailtask is tested because the process is the same as foiarequestmailtask describe "mail task" do - let(:appeal) { create(:appeal) } + let(:appeal) { create(:appeal, :active) } let(:appeal_state) { create(:appeal_state, appeal_id: appeal.id, appeal_type: appeal.class.to_s) } let(:current_user) { create(:user) } let(:priv_org) { PrivacyTeam.singleton } @@ -640,7 +652,7 @@ end context "Foia Colocated Tasks" do - let(:appeal) { create(:appeal) } + let(:appeal) { create(:appeal, :active) } let(:appeal_state) { create(:appeal_state, appeal_id: appeal.id, appeal_type: appeal.class.to_s) } let!(:attorney) { create(:user) } let!(:attorney_task) { create(:ama_attorney_task, appeal: appeal, assigned_to: attorney) } @@ -691,7 +703,7 @@ end context "Privacy Act Tasks" do - let(:appeal) { create(:appeal) } + let(:appeal) { create(:appeal, :active) } let(:appeal_state) { create(:appeal_state, appeal_id: appeal.id, appeal_type: appeal.class.to_s) } let(:attorney) { create(:user) } let(:current_user) { create(:user) } @@ -929,6 +941,12 @@ let(:task) { create(:informal_hearing_presentation_task, :in_progress, assigned_to: org) } let(:appeal_state) { create(:appeal_state, appeal_id: task.appeal.id, appeal_type: task.appeal.class.to_s) } let(:template_name) { Constants.EVENT_TYPE_FILTERS.vso_ihp_complete } + let(:appeal) { task.appeal } + + before do + InitialTasksFactory.new(appeal).create_root_and_sub_tasks! + end + it "will notify the appellant of the 'IhpTaskComplete' status" do allow(task).to receive(:verify_user_can_update!).with(user).and_return(true) expect(AppellantNotification).to receive(:notify_appellant).with(task.appeal, template_name) diff --git a/spec/models/case_distribution_lever_spec.rb b/spec/models/case_distribution_lever_spec.rb index d0f5850f7a9..2940f835bbd 100644 --- a/spec/models/case_distribution_lever_spec.rb +++ b/spec/models/case_distribution_lever_spec.rb @@ -17,7 +17,8 @@ ama_evidence_submission_docket_time_goals ama_hearing_docket_time_goals ama_hearing_start_distribution_prior_to_goals - ama_evidence_submission_start_distribution_prior_to_goals] + ama_evidence_submission_start_distribution_prior_to_goals + nonsscavlj_number_of_appeals_to_move] end let!(:float_levers) do %w[maximum_direct_review_proportion minimum_legacy_proportion nod_adjustment] diff --git a/spec/models/decision_document_spec.rb b/spec/models/decision_document_spec.rb index a90aba992cb..9896879c68d 100644 --- a/spec/models/decision_document_spec.rb +++ b/spec/models/decision_document_spec.rb @@ -133,7 +133,7 @@ end context "#process!" do - subject { decision_document.process! } + subject { decision_document.process!(false) } before do allow(decision_document).to receive(:submitted_and_ready?).and_return(true) @@ -141,6 +141,7 @@ allow(VBMSService).to receive(:establish_claim!).and_call_original allow(VBMSService).to receive(:create_contentions!).and_call_original FeatureToggle.enable!(:send_email_for_dispatched_appeals) + InitialTasksFactory.new(decision_document.appeal).create_root_and_sub_tasks! 
end after { FeatureToggle.disable!(:send_email_for_dispatched_appeals) } diff --git a/spec/models/returned_appeal_job_spec.rb b/spec/models/returned_appeal_job_spec.rb new file mode 100644 index 00000000000..8c8e5989df8 --- /dev/null +++ b/spec/models/returned_appeal_job_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +RSpec.describe ReturnedAppealJob, :all_dbs do + describe "factory" do + it "is valid" do + expect(build(:returned_appeal_job)).to be_valid + end + end +end diff --git a/spec/models/serializers/work_queue/appeal_search_serializer_spec.rb b/spec/models/serializers/work_queue/appeal_search_serializer_spec.rb new file mode 100644 index 00000000000..686d9e68074 --- /dev/null +++ b/spec/models/serializers/work_queue/appeal_search_serializer_spec.rb @@ -0,0 +1,183 @@ +# frozen_string_literal: true + +require "rails_helper" + +describe WorkQueue::AppealSearchSerializer, :all_dbs do + describe "#assigned_to_location" do + context "when appeal status is distributed to judge" do + let!(:judge_user) { create(:user, :with_vacols_judge_record, full_name: "Judge Judy", css_id: "JUDGE_J") } + let(:appeal) { create(:appeal, :assigned_to_judge, associated_judge: judge_user) } + + before do + User.authenticate!(user: judge_user) + end + + subject { described_class.new(appeal, params: { user: judge_user }) } + + context "and user is a board judge" do + it "shows CSS ID" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]) + .to eq(appeal.assigned_to_location) + end + end + + context "when appeal status is assigned to attorney" do + let(:appeal) { create(:appeal, :at_attorney_drafting) } + let!(:attorney_user) { create(:user) } + let!(:vacols_atty) { create(:staff, :attorney_role, sdomainid: attorney_user.css_id) } + + before do + User.authenticate!(user: attorney_user) + end + + subject { described_class.new(appeal, params: { user: attorney_user }) } + + context "and user is a board attorney" do + it "shows CSS ID" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]) + .to eq(appeal.assigned_to_location) + end + end + end + + context "when appeal status is ready for signature" do + let!(:judge_user) { create(:user, :with_vacols_judge_record, full_name: "Judge Judy", css_id: "JUDGE_J") } + let(:appeal) { create(:appeal, :at_judge_review, associated_judge: judge_user) } + let!(:hearings_coordinator_user) do + coordinator = create(:hearings_coordinator) + HearingsManagement.singleton.add_user(coordinator) + coordinator + end + + before do + User.authenticate!(user: hearings_coordinator_user) + end + + subject { described_class.new(appeal, params: { user: hearings_coordinator_user }) } + + context "and user is a hearings coordinator" do + it "shows CSS ID" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]) + .to eq(appeal.assigned_to_location) + end + end + end + + context "when status is distributed to judge" do + let!(:judge_user) { create(:user, :with_vacols_judge_record, full_name: "Judge Judy", css_id: "JUDGE_J") } + let(:appeal) { create(:appeal, :assigned_to_judge, associated_judge: judge_user) } + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + + context "when status is ready for signature" do + let!(:judge_user) { 
create(:user, :with_vacols_judge_record, full_name: "Judge Judy", css_id: "JUDGE_J") } + let(:appeal) { create(:appeal, :at_judge_review, associated_judge: judge_user) } + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + + context "when status is on hold" do + let(:appeal) do + create(:appeal).tap do |appeal| + create(:timed_hold_task, parent: create(:root_task, appeal: appeal)) + end + end + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + + context "when status is misc" do + let(:appeal) do + create(:appeal).tap do |appeal| + create(:ama_judge_dispatch_return_task, parent: create(:root_task, appeal: appeal)) + end + end + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + + context "when status is unknown" do + let(:appeal) { create(:appeal) } + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + + context "when status is assigned to attorney" do + let(:appeal) { create(:appeal, :at_attorney_drafting) } + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "does not show CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]).to be_nil + end + end + end + + context "when appeal status is not restricted" do + let(:appeal) { create(:appeal, :with_pre_docket_task) } + let(:vso_user) { create(:user, :vso_role) } + + before do + User.authenticate!(user: vso_user) + end + + subject { described_class.new(appeal, params: { user: vso_user }) } + + it "shows CSS ID to VSO user" do + expect(subject.serializable_hash[:data][:attributes][:assigned_to_location]) + .to eq(appeal.assigned_to_location) + end + end + end +end diff --git a/spec/models/tasks/bva_dispatch_task_spec.rb b/spec/models/tasks/bva_dispatch_task_spec.rb index 9124d11eaf8..7e612ef0e51 100644 --- a/spec/models/tasks/bva_dispatch_task_spec.rb +++ b/spec/models/tasks/bva_dispatch_task_spec.rb @@ -113,7 +113,7 @@ decision_document = DecisionDocument.find_by(appeal_id: root_task.appeal.id) expect(ProcessDecisionDocumentJob).to have_received(:perform_later) - .with(decision_document.id, nil).exactly(:once) + .with(decision_document.id, false, nil).exactly(:once) expect(decision_document).to_not eq nil expect(decision_document.document_type).to eq "BVA Decision" expect(decision_document.source).to eq "BVA" @@ -144,7 +144,7 @@ decision_document = DecisionDocument.find_by(appeal_id: legacy_appeal.id) 
expect(ProcessDecisionDocumentJob).to have_received(:perform_later) - .with(decision_document.id, nil).exactly(:once) + .with(decision_document.id, false, nil).exactly(:once) expect(decision_document).to_not eq nil expect(decision_document.document_type).to eq "BVA Decision" expect(decision_document.source).to eq "BVA" @@ -248,7 +248,7 @@ decision_document = DecisionDocument.find_by(appeal_id: root_task.appeal.id) expect(ProcessDecisionDocumentJob).to have_received(:perform_later) - .with(decision_document.id, nil).exactly(:once) + .with(decision_document.id, false, nil).exactly(:once) expect(decision_document).to_not eq nil expect(decision_document.document_type).to eq "BVA Decision" expect(decision_document.source).to eq "BVA" diff --git a/spec/models/vacols/case_docket_spec.rb b/spec/models/vacols/case_docket_spec.rb index 3af5052a634..073668e37f6 100644 --- a/spec/models/vacols/case_docket_spec.rb +++ b/spec/models/vacols/case_docket_spec.rb @@ -1110,12 +1110,13 @@ def create_case_hearing(original_case, hearing_judge) c end - it "considers cases tied to a judge if they held a hearing after the previous case was decided" do + it "cases are tied to the judge who held a hearing after the previous case was decided", :aggregate_failures do IneligibleJudgesJob.perform_now + # For case distribution levers set to a value + new_hearing_judge_cases = VACOLS::CaseDocket.distribute_priority_appeals(new_hearing_judge, "any", 100, true) tied_judge_cases = VACOLS::CaseDocket.distribute_priority_appeals(tied_judge_caseflow, "any", 100, true) other_judge_cases = VACOLS::CaseDocket.distribute_priority_appeals(other_judge_caseflow, "any", 100, true) - new_hearing_judge_cases = VACOLS::CaseDocket.distribute_priority_appeals(new_hearing_judge, "any", 100, true) expect(new_hearing_judge_cases.map { |c| c["bfkey"] }.sort) .to match_array([ @@ -1131,6 +1132,52 @@ def create_case_hearing(original_case, hearing_judge) .to match_array([ case_7, case_8, case_9, case_10, case_12, case_13 ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + # For case distribution levers set to infinite + CaseDistributionLever.find_by(item: "cavc_affinity_days").update!(value: "infinite") + CaseDistributionLever.find_by(item: "cavc_aod_affinity_days").update!(value: "infinite") + + new_hrng_judge_infinite = VACOLS::CaseDocket.distribute_priority_appeals(new_hearing_judge, "any", 100, true) + tied_judge_infinite = VACOLS::CaseDocket.distribute_priority_appeals(tied_judge_caseflow, "any", 100, true) + other_judge_infinite = VACOLS::CaseDocket.distribute_priority_appeals(other_judge_caseflow, "any", 100, true) + + expect(new_hrng_judge_infinite.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_1, case_2, case_3, case_4, case_5, case_10, case_12 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + expect(tied_judge_infinite.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_6, case_9, case_10, case_11, case_12, case_13 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + expect(other_judge_infinite.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_7, case_8, case_10, case_12 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + # For case distribution levers set to omit + CaseDistributionLever.find_by(item: "cavc_affinity_days").update!(value: "omit") + CaseDistributionLever.find_by(item: "cavc_aod_affinity_days").update!(value: "omit") + + new_hearing_judge_omit = VACOLS::CaseDocket.distribute_priority_appeals(new_hearing_judge, "any", 100, true) + tied_judge_omit = 
VACOLS::CaseDocket.distribute_priority_appeals(tied_judge_caseflow, "any", 100, true) + other_judge_omit = VACOLS::CaseDocket.distribute_priority_appeals(other_judge_caseflow, "any", 100, true) + + expect(new_hearing_judge_omit.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_1, case_2, case_3, case_4, case_5, case_7, case_8, case_9, case_10, case_11, case_12, case_13 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + expect(tied_judge_omit.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_6, case_7, case_8, case_9, case_10, case_11, case_12, case_13 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) + + expect(other_judge_omit.map { |c| c["bfkey"] }.sort) + .to match_array([ + case_7, case_8, case_9, case_10, case_11, case_12, case_13 + ].map { |c| (c["bfkey"].to_i + 1).to_s }.sort) end end end diff --git a/spec/queries/appeals_in_location_63_in_past_2_days_spec.rb b/spec/queries/appeals_in_location_63_in_past_2_days_spec.rb new file mode 100644 index 00000000000..f6f40916ac6 --- /dev/null +++ b/spec/queries/appeals_in_location_63_in_past_2_days_spec.rb @@ -0,0 +1,307 @@ +# frozen_string_literal: true + +require "./app/queries/appeals_in_location_63_in_past_2_days" + +describe AppealsInLocation63InPast2Days do + let(:job) { described_class } + let(:avlj_name) { "Avlj Judge" } + let(:avlj_fname) { "Avlj" } + let(:avlj_lname) { "Judge" } + + let(:non_ssc_avlj) do + User.find_by_css_id("NONSSCTEST") || + create(:user, :non_ssc_avlj_user, css_id: "NONSSCTEST", full_name: avlj_name) + end + + let(:prev_deciding_judge) do + User.find_by_css_id("PREDECJUDG") || + create(:user, :vlj_user, css_id: "PREDECJUDG", full_name: "Prevdec Judge") + end + + let(:appeal) do + { + "tinum" => "150000999988855", + "aod" => false, + "cavc" => false, + "bfd19" => "2023-01-05 00:00:00 UTC", + "bfdloout" => "2024-08-27 09:19:55 UTC", + "ssn" => "999559999", + "snamef" => "Bob", + "snamel" => "Goodman", + "vlj" => non_ssc_avlj.vacols_staff.sattyid, + "vlj_namef" => avlj_fname, + "vlj_namel" => avlj_lname, + "prev_deciding_judge" => prev_deciding_judge.vacols_staff.sattyid, + "bfkey" => "99", + "bfdlocin" => "2024-09-10 14:40:58 UTC", + "bfcurloc" => "63" + } + end + + context "#process and #tied_appeals" do + it "selects all appeals in location 63 and generates the CSV" do + allow(AppealRepository).to receive(:loc_63_appeals).and_return([appeal]) + expect { described_class.process }.not_to raise_error + expect(described_class.loc_63_appeals.size).to eq 1 + end + end + + context "Test the CSV generation" do + context "where it uses attributes " do + it "to create a hash Legacy rows moved to loc 63" do + subject_legacy = described_class.legacy_rows([appeal]).first + + expect(subject_legacy[:docket_number]).to eq appeal["tinum"] + expect(subject_legacy[:aod]).to eq appeal["aod"] + expect(subject_legacy[:cavc]).to be appeal["cavc"] + expect(subject_legacy[:receipt_date]).to eq appeal["bfd19"] + expect(subject_legacy[:ready_for_distribution_at]).to eq appeal["bfdloout"] + expect(subject_legacy[:veteran_file_number]).to eq appeal["ssn"] + expect(subject_legacy[:veteran_name]).to eq "Bob Goodman" + expect(subject_legacy[:hearing_judge_id]).to eq non_ssc_avlj.vacols_staff.sdomainid + expect(subject_legacy[:hearing_judge_name]).to eq avlj_name + expect(subject_legacy[:deciding_judge_id]).to eq prev_deciding_judge.vacols_staff.sdomainid + expect(subject_legacy[:deciding_judge_name]).to eq prev_deciding_judge.full_name + expect(subject_legacy[:affinity_start_date]).to eq nil + 
expect(subject_legacy[:moved_date_time]).to eq appeal["bfdlocin"] + expect(subject_legacy[:bfcurloc]).to eq appeal["bfcurloc"] + end + end + end + + describe ".loc_63_appeals" do + let(:non_ssc_avlj1) do + User.find_by_css_id("NONSSCTST1") || + create(:user, :non_ssc_avlj_user, css_id: "NONSSCTST1", full_name: "First AVLJ") + end + + let(:non_ssc_avlj2) do + User.find_by_css_id("NONSSCTST2") || + create(:user, :non_ssc_avlj_user, css_id: "NONSSCTST2", full_name: "Second AVLJ") + end + let(:veteran) { create(:veteran) } + + let(:correspondent) do + create( + :correspondent, + snamef: veteran.first_name, + snamel: veteran.last_name, + ssalut: "", ssn: veteran.file_number + ) + end + + let(:vacols_prio_case) do + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj1, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 60.days.ago + ) + end + + let(:vacols_non_prio_case) do + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj2, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 7.days.ago + ) + end + + let!(:legacy_unsigned_priority_tied_to_non_ssc_avlj1) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_unsigned_non_priority_tied_to_non_ssc_avlj2) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_non_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_signed_non_priority_tied_to_non_ssc_avlj1) do + create(:legacy_signed_appeal, :type_original, signing_avlj: non_ssc_avlj, assigned_avlj: non_ssc_avlj) + end + + let!(:legacy_signed_priority_tied_to_non_ssc_avlj2) do + create(:legacy_signed_appeal, :type_cavc_remand, signing_avlj: non_ssc_avlj2, assigned_avlj: non_ssc_avlj2) + end + + let(:appeals) { [] } + + context "there are 2 appeals still in loc 81" do + let(:vacols_prio_case_81) do + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj1, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 60.days.ago + ) + end + + let(:vacols_non_prio_case_81) do + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj2, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 7.days.ago + ) + end + + let!(:legacy_unsigned_priority_tied_to_non_ssc_avlj1_81) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_prio_case_81, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_unsigned_non_priority_tied_to_non_ssc_avlj2_81) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_non_prio_case_81, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: 
legacy_appeal) + end + + it "fetches the correct matching appeals only in loc 63" do + move_to_loc_63(vacols_prio_case, 0.days.ago) + move_to_loc_63(vacols_non_prio_case, 1.day.ago) + move_to_loc_63(legacy_signed_non_priority_tied_to_non_ssc_avlj1, 1.day.ago) + move_to_loc_63(legacy_signed_priority_tied_to_non_ssc_avlj2, 2.days.ago) + + expected_appeals = [ + vacols_prio_case, + vacols_non_prio_case, + legacy_signed_non_priority_tied_to_non_ssc_avlj1, + legacy_signed_priority_tied_to_non_ssc_avlj2 + ] + expected_appeals_appended_bfkeys = expected_appeals.map { |ea| "150000#{ea.bfkey}" } + returned_appeals = job.send(:loc_63_appeals) + + expect(returned_appeals.size).to eq(4) + expect(returned_appeals.map { |ra| ra[:docket_number] }).to match_array(expected_appeals_appended_bfkeys) + end + end + + context "there are 2 appeals moved to loc 63 more than 2 days ago" do + let(:vacols_prio_case_3_days) do + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj1, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 60.days.ago + ) + end + + let(:vacols_non_prio_case_90_days) do + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj2, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 7.days.ago + ) + end + + let!(:legacy_unsigned_priority_tied_to_non_ssc_avlj1_3_days) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_prio_case_3_days, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_unsigned_non_priority_tied_to_non_ssc_avlj2_90_days) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_non_prio_case_90_days, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + it "fetches the correct matching appeals only in loc 63" do + move_to_loc_63(vacols_prio_case, 0.days.ago) + move_to_loc_63(vacols_non_prio_case, 1.day.ago) + move_to_loc_63(legacy_signed_non_priority_tied_to_non_ssc_avlj1, 1.day.ago) + move_to_loc_63(legacy_signed_priority_tied_to_non_ssc_avlj2, 2.days.ago) + move_to_loc_63(vacols_prio_case_3_days, 3.days.ago) + move_to_loc_63(vacols_non_prio_case_90_days, 90.days.ago) + + expected_appeals = [ + vacols_prio_case, + vacols_non_prio_case, + legacy_signed_non_priority_tied_to_non_ssc_avlj1, + legacy_signed_priority_tied_to_non_ssc_avlj2 + ] + expected_appeals_appended_bfkeys = expected_appeals.map { |ea| "150000#{ea.bfkey}" } + returned_appeals = job.send(:loc_63_appeals) + + expect(returned_appeals.size).to eq(4) + expect(returned_appeals.map { |ra| ra[:docket_number] }).to match_array(expected_appeals_appended_bfkeys) + end + end + end + + def move_to_loc_63(legacy_case, date) + value = date.in_time_zone("America/New_York") + date_time = Time.utc(value.year, value.month, value.day, value.hour, value.min, value.sec) + + legacy_case.update!(bfcurloc: 63, bfdlocin: date_time) + end +end diff --git a/spec/queries/appeals_tied_to_avljs_and_vljs_query_spec.rb b/spec/queries/appeals_tied_to_avljs_and_vljs_query_spec.rb new file mode 100644 index 00000000000..6ddd918c5c6 --- /dev/null +++ b/spec/queries/appeals_tied_to_avljs_and_vljs_query_spec.rb @@ -0,0 +1,172
@@ +# frozen_string_literal: true + +describe AppealsTiedToAvljsAndVljsQuery do + let(:hearing_judge) { create(:user, :judge, :with_vacols_judge_record) } + let(:original_deciding_judge) { create(:user, :judge, :with_vacols_judge_record) } + + avlj_name = "John Doe" + let(:non_ssc_avlj) do + User.find_by_css_id("NONSSCTEST") || + create(:user, :non_ssc_avlj_user, css_id: "NONSSCTEST", full_name: avlj_name) + end + + signing_vlj_name = "Smith Cash" + let(:signing_vlj) do + User.find_by_css_id("VLJTEST") || + create(:user, :vlj_user, css_id: "VLJTEST", full_name: signing_vlj_name) + end + let(:veteran) { create(:veteran) } + + let(:correspondent) do + create( + :correspondent, + snamef: veteran.first_name, + snamel: veteran.last_name, + ssalut: "", ssn: veteran.file_number + ) + end + + let(:vacols_prio_case) do + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 60.days.ago + ) + end + let(:vacols_non_prio_case) do + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 7.days.ago + ) + end + + context "#process and #tied_appeals" do + # Base appeals not tied to non ssc avljs that should NOT be grabbed from the query + let!(:not_ready_ama_original_appeal) { create(:appeal, :evidence_submission_docket, :with_post_intake_tasks) } + let!(:ama_original_direct_review_appeal) { create(:appeal, :direct_review_docket, :ready_for_distribution) } + let!(:ama_original_evidence_submission_appeal) do + create(:appeal, :evidence_submission_docket, :ready_for_distribution) + end + + let!(:not_ready_legacy_original_appeal) do + create(:case_with_form_9, :type_original, :travel_board_hearing_requested) + end + let!(:legacy_original_appeal_no_hearing) { create(:case, :type_original, :ready_for_distribution) } + + # Appeals that should be grabbed with the Query + let!(:legacy_unsigned_priority_tied_to_non_ssc_avlj) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_unsigned_non_priority_tied_to_non_ssc_avlj) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_non_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_signed_non_priority_tied_to_non_ssc_avlj) do + create(:legacy_signed_appeal, :type_original, signing_avlj: signing_vlj, assigned_avlj: non_ssc_avlj) + end + + let!(:legacy_signed_priority_tied_to_non_ssc_avlj) do + create(:legacy_signed_appeal, :type_cavc_remand, signing_avlj: signing_vlj, assigned_avlj: non_ssc_avlj) + end + + let!(:legacy_original_appeal_with_hearing) do + create(:case, :type_original, :ready_for_distribution, case_hearings: [legacy_original_appeal_case_hearing]) + end + let(:legacy_original_appeal_case_hearing) { build(:case_hearing, :disposition_held, user: hearing_judge) } + + let!(:ama_original_hearing_appeal) do + create(:appeal, :hearing_docket, :held_hearing_and_ready_to_distribute, tied_judge: hearing_judge) + end + + it 
"selects all appeals tied to non ssc avlj and generates the CSV" do + expect { described_class.process }.not_to raise_error + expect(described_class.tied_appeals.size).to eq 6 + end + end + + context "Test the CSV generation" do + let!(:legacy_signed_appeal_with_attributes) do + create(:legacy_signed_appeal, :type_original, signing_avlj: signing_vlj, assigned_avlj: non_ssc_avlj) + end + + let!(:ama_appeal) do + create(:appeal, :hearing_docket, :held_hearing_and_ready_to_distribute, tied_judge: hearing_judge) + end + + let(:legacy_query_result) { VACOLS::CaseDocket.appeals_tied_to_avljs_and_vljs } + + let(:docket) { HearingRequestDocket.new } + let(:ama_query_result) { docket.tied_to_vljs(described_class.vlj_user_ids) } + + context "where it uses attributes " do + it "to create a hash for AMA and Legacy rows" do + subject_legacy = described_class.legacy_rows(legacy_query_result, :legacy).first + subject_ama = described_class.ama_rows(ama_query_result, :hearing).first + corres = legacy_signed_appeal_with_attributes.reload.correspondent + corres_ama = ama_appeal.reload.veteran + + expect(subject_legacy[:docket_number]).to eq legacy_signed_appeal_with_attributes.folder.tinum + expect(subject_legacy[:docket]).to eq "legacy" + expect(subject_legacy[:priority]).to be "" + expect(subject_legacy[:veteran_file_number]).to eq corres.ssn + expect(subject_legacy[:veteran_name]).to eq "#{corres.snamef} #{corres.snamel}" + expect(subject_legacy[:vlj]).to eq avlj_name + expect(subject_legacy[:hearing_judge]).to eq avlj_name + expect(subject_legacy[:most_recent_signing_judge]).to eq signing_vlj_name + expect(subject_legacy[:bfcurloc]).to eq legacy_signed_appeal_with_attributes.bfcurloc + + expect(subject_ama[:docket_number]).to eq ama_appeal.docket_number + expect(subject_ama[:docket]).to eq "hearing" + expect(subject_ama[:priority]).to be false + expect(subject_ama[:veteran_file_number]).to eq corres_ama.file_number + expect(subject_ama[:veteran_name]).to eq corres_ama.name.to_s + expect(subject_ama[:vlj]).to eq hearing_judge.full_name + expect(subject_ama[:hearing_judge]).to eq hearing_judge.full_name + expect(subject_ama[:most_recent_signing_judge]).to eq nil + expect(subject_ama[:bfcurloc]).to eq nil + end + + it "to verify that calculate_field_values is returning the correct items" do + subject = described_class.calculate_field_values(legacy_query_result.first) + + corres = legacy_signed_appeal_with_attributes.reload.correspondent + + expect(subject[:veteran_file_number]).to eq corres.ssn + expect(subject[:veteran_name]).to eq "#{corres.snamef} #{corres.snamel}" + expect(subject[:vlj]).to eq avlj_name + expect(subject[:hearing_judge]).to eq avlj_name + expect(subject[:most_recent_signing_judge]).to eq signing_vlj_name + expect(subject[:bfcurloc]).to eq legacy_signed_appeal_with_attributes.bfcurloc + end + end + end +end diff --git a/spec/queries/appeals_tied_to_non_ssc_avlj_query_spec.rb b/spec/queries/appeals_tied_to_non_ssc_avlj_query_spec.rb new file mode 100644 index 00000000000..a4002422312 --- /dev/null +++ b/spec/queries/appeals_tied_to_non_ssc_avlj_query_spec.rb @@ -0,0 +1,157 @@ +# frozen_string_literal: true + +describe AppealsTiedToNonSscAvljQuery do + let(:hearing_judge) { create(:user, :judge, :with_vacols_judge_record) } + let(:original_deciding_judge) { create(:user, :judge, :with_vacols_judge_record) } + + avlj_name = "John Doe" + let(:non_ssc_avlj) do + User.find_by_css_id("NONSSCTEST") || + create(:user, :non_ssc_avlj_user, css_id: "NONSSCTEST", full_name: avlj_name) + end + 
let(:veteran) { create(:veteran) } + + let(:correspondent) do + create( + :correspondent, + snamef: veteran.first_name, + snamel: veteran.last_name, + ssalut: "", ssn: veteran.file_number + ) + end + + let(:vacols_prio_case) do + create( + :case, + :aod, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 60.days.ago + ) + end + let(:vacols_non_prio_case) do + create( + :case, + :tied_to_judge, + :video_hearing_requested, + :type_original, + :ready_for_distribution, + tied_judge: non_ssc_avlj, + correspondent: correspondent, + bfcorlid: "#{veteran.file_number}S", + case_issues: create_list(:case_issue, 3, :compensation), + bfd19: 7.days.ago + ) + end + + context "#process and #tied_appeals" do + # Base appeals not tied to non ssc avljs that should NOT be grabbed from the query + let!(:not_ready_ama_original_appeal) { create(:appeal, :evidence_submission_docket, :with_post_intake_tasks) } + let!(:ama_original_direct_review_appeal) { create(:appeal, :direct_review_docket, :ready_for_distribution) } + let!(:ama_original_evidence_submission_appeal) do + create(:appeal, :evidence_submission_docket, :ready_for_distribution) + end + let!(:ama_original_hearing_appeal) do + create(:appeal, :hearing_docket, :held_hearing_and_ready_to_distribute, tied_judge: hearing_judge) + end + + let!(:not_ready_legacy_original_appeal) do + create(:case_with_form_9, :type_original, :travel_board_hearing_requested) + end + let!(:legacy_original_appeal_no_hearing) { create(:case, :type_original, :ready_for_distribution) } + let!(:legacy_original_appeal_with_hearing) do + create(:case, :type_original, :ready_for_distribution, case_hearings: [legacy_original_appeal_case_hearing]) + end + let(:legacy_original_appeal_case_hearing) { build(:case_hearing, :disposition_held, user: hearing_judge) } + + # Appeals that should be grabbed with the Query + let!(:legacy_unsigned_priority_tied_to_non_ssc_avlj) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_unsigned_non_priority_tied_to_non_ssc_avlj) do + legacy_appeal = create( + :legacy_appeal, + :with_root_task, + vacols_case: vacols_non_prio_case, + closest_regional_office: "RO17" + ) + create(:available_hearing_locations, "RO17", appeal: legacy_appeal) + end + + let!(:legacy_signed_non_priority_tied_to_non_ssc_avlj) do + create(:legacy_signed_appeal, :type_original, signing_avlj: non_ssc_avlj, assigned_avlj: non_ssc_avlj) + end + + let!(:legacy_signed_priority_tied_to_non_ssc_avlj) do + create(:legacy_signed_appeal, :type_cavc_remand, signing_avlj: non_ssc_avlj, assigned_avlj: non_ssc_avlj) + end + + it "selects all appeals tied to non ssc avlj and generates the CSV" do + expect { described_class.process }.not_to raise_error + expect(described_class.tied_appeals.size).to eq 4 + end + end + + context "Test the CSV generation" do + let!(:legacy_signed_appeal_with_attributes) do + create(:legacy_signed_appeal, :type_original, signing_avlj: non_ssc_avlj, assigned_avlj: non_ssc_avlj) + end + + let(:query_result) { VACOLS::CaseDocket.appeals_tied_to_non_ssc_avljs } + + subject { described_class.legacy_rows(query_result, :legacy).first } + + context "where it uses attributes " do + it "to create a hash 
+        corres = legacy_signed_appeal_with_attributes.reload.correspondent
+
+        expect(subject[:docket_number]).to eq legacy_signed_appeal_with_attributes.folder.tinum
+        expect(subject[:docket]).to eq "legacy"
+        expect(subject[:priority]).to be ""
+        expect(subject[:veteran_file_number]).to eq corres.ssn
+        expect(subject[:veteran_name]).to eq "#{corres.snamef} #{corres.snamel}"
+        expect(subject[:non_ssc_avlj]).to eq avlj_name
+        expect(subject[:hearing_judge]).to eq avlj_name
+        expect(subject[:most_recent_signing_judge]).to eq avlj_name
+        expect(subject[:bfcurloc]).to eq legacy_signed_appeal_with_attributes.bfcurloc
+      end
+
+      context "to test getting the avlj name from appeal" do
+        it "where appeals vlj is nil" do
+          appeal = query_result.first
+          appeal["vlj"] = nil
+          expect(described_class.get_avlj_name(appeal)).to eq nil
+        end
+        it "where appeals vlj is not nil" do
+          appeal = query_result.first
+          expect(described_class.get_avlj_name(appeal)).to eq avlj_name
+        end
+      end
+
+      context "to test getting the prev judges name from appeal" do
+        it "where appeal has no prev deciding judge" do
+          appeal = query_result.first
+          appeal["prev_deciding_judge"] = nil
+          expect(described_class.get_prev_judge_name(appeal)).to eq nil
+        end
+        it "where appeal has a previous deciding judge" do
+          appeal = query_result.first
+          expect(described_class.get_prev_judge_name(appeal)).to eq avlj_name
+        end
+      end
+    end
+  end
+end
diff --git a/spec/seeds/case_distribution_test_data_spec.rb b/spec/seeds/case_distribution_test_data_spec.rb
index 373982fd017..86d44f2186f 100644
--- a/spec/seeds/case_distribution_test_data_spec.rb
+++ b/spec/seeds/case_distribution_test_data_spec.rb
@@ -45,7 +45,7 @@
     seed.seed!
 
     # checking CaseDistributionlevers count
-    expect(CaseDistributionLever.count).to eq 28
+    expect(CaseDistributionLever.count).to eq 30
 
     expect(Appeal.where(docket_type: "direct_review").count).to eq 38
     expect(Appeal.where(docket_type: "direct_review").first.receipt_date).to eq(Time.zone.today - (20.years + 1.day))
diff --git a/spec/services/metrics_service_spec.rb b/spec/services/metrics_service_spec.rb
index 9b349e84ebe..da2df30db23 100644
--- a/spec/services/metrics_service_spec.rb
+++ b/spec/services/metrics_service_spec.rb
@@ -63,7 +63,7 @@
           service: service,
           endpoint: name
         },
-        sent_to: [["rails_console"], "datadog", "dynatrace"],
+        sent_to: [["rails_console"], "dynatrace"],
         sent_to_info: {
           metric_group: "service",
           metric_name: "request_latency",
diff --git a/spec/services/sqs_service_spec.rb b/spec/services/sqs_service_spec.rb
new file mode 100644
index 00000000000..9322989ff54
--- /dev/null
+++ b/spec/services/sqs_service_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+describe SqsService do
+  let(:sqs_client) { SqsService.sqs_client }
+
+  before(:each) { wipe_queues }
+  after(:all) { wipe_queues }
+
+  context "#find_queue_url_by_name" do
+    let!(:queue) { create_queue(queue_name, fifo) }
+
+    subject { SqsService.find_queue_url_by_name(name: queue_name, check_fifo: false) }
+
+    context "FIFO" do
+      let(:fifo) { true }
+      let(:queue_name) { "my_fifo_queue" }
+
+      it "the queue is found and is validated to be a FIFO queue" do
+        expect(SqsService.find_queue_url_by_name(name: queue_name, check_fifo: true))
+          .to include("caseflow_test_my_fifo_queue.fifo")
+      end
+
+      it "the queue is found while validation is opted out" do
+        is_expected.to include("caseflow_test_my_fifo_queue.fifo")
+      end
+
+      it "a non-existent queue cannot be found" do
+        expect { SqsService.find_queue_url_by_name(name: "fake", check_fifo: false) }.to raise_error do |error|
+          expect(error).to be_a(Caseflow::Error::SqsQueueNotFoundError)
+          expect(error.to_s).to include("The fake SQS queue is missing in this environment.")
+        end
+      end
+    end
+
+    context "non-FIFO" do
+      let(:fifo) { false }
+      let(:queue_name) { "my_normal_queue" }
+
+      it "the queue is found" do
+        is_expected.to include("caseflow_test_my_normal_queue")
+        is_expected.to_not include(".fifo")
+      end
+
+      it "the queue found fails the FIFO check" do
+        expect { SqsService.find_queue_url_by_name(name: queue_name, check_fifo: true) }.to raise_error do |error|
+          expect(error).to be_a(Caseflow::Error::SqsUnexpectedQueueTypeError)
+          expect(error.to_s).to include("No FIFO queue with name my_normal_queue could be located.")
+        end
+      end
+    end
+  end
+
+  context "#batch_delete_messages" do
+    let!(:queue) { create_queue("batch_delete_test", false) }
+    let(:queue_url) { queue.queue_url }
+
+    context "ten or fewer messages are deleted" do
+      let!(:initial_messages) { queue_messages(queue_url) }
+      let(:received_messages) do
+        SqsService.sqs_client.receive_message({
+          queue_url: queue_url,
+          max_number_of_messages: 10
+        }).messages
+      end
+
+      it "the messages are deleted properly" do
+        expect(approximate_number_of_messages_in_queue(queue_url)).to eq 10
+
+        SqsService.batch_delete_messages(queue_url: queue_url, messages: received_messages)
+
+        expect(approximate_number_of_messages_in_queue(queue_url)).to eq 0
+      end
+    end
+
+    context "more than ten messages are deleted" do
+      let!(:initial_messages) { queue_messages(queue_url, 20) }
+
+      let(:received_messages) do
+        Array.new(2).flat_map do
+          SqsService.sqs_client.receive_message(
+            {
+              queue_url: queue_url,
+              max_number_of_messages: 10
+            }
+          ).messages
+        end
+      end
+
+      it "the messages are deleted properly" do
+        expect(approximate_number_of_messages_in_queue(queue_url)).to eq 20
+
+        SqsService.batch_delete_messages(queue_url: queue.queue_url, messages: received_messages)
+
+        expect(approximate_number_of_messages_in_queue(queue_url)).to eq 0
+      end
+    end
+  end
+
+  def create_queue(name, fifo = false)
+    sqs_client.create_queue({
+      queue_name: "caseflow_test_#{name}#{fifo ? '.fifo' : ''}".to_sym,
+      attributes: fifo ? { "FifoQueue" => "true" } : {}
+    })
+  end
+
+  def queue_messages(queue_url, num_to_queue = 10)
+    bodies = Array.new(num_to_queue).map.with_index do |_val, idx|
+      { test: idx }.to_json
+    end
+
+    bodies.each do |body|
+      sqs_client.send_message({
+        queue_url: queue_url,
+        message_body: body
+      })
+    end
+  end
+
+  def approximate_number_of_messages_in_queue(queue_url)
+    resp = sqs_client.get_queue_attributes({
+      queue_url: queue_url,
+      attribute_names: ["ApproximateNumberOfMessages"]
+    })
+
+    resp.attributes["ApproximateNumberOfMessages"].to_i
+  end
+
+  def wipe_queues
+    client = SqsService.sqs_client
+
+    queues_to_delete = client.list_queues.queue_urls.filter { _1.include?("caseflow_test") }
+
+    queues_to_delete.each do |queue_url|
+      client.delete_queue(queue_url: queue_url)
+    end
+  end
+end
diff --git a/spec/workflows/ihp_tasks_factory_spec.rb b/spec/workflows/ihp_tasks_factory_spec.rb
index 29f185ee4eb..7c80b37f763 100644
--- a/spec/workflows/ihp_tasks_factory_spec.rb
+++ b/spec/workflows/ihp_tasks_factory_spec.rb
@@ -3,7 +3,7 @@
 require "rails_helper"
 
 describe IhpTasksFactory, :postgres do
-  let(:appeal) { create(:appeal) }
+  let(:appeal) { create(:appeal, :active) }
   let(:parent_task) { create(:task, appeal: appeal) }
   let(:ihp_tasks_factory) { IhpTasksFactory.new(parent_task) }