diff --git a/.codeclimate.yml b/.codeclimate.yml index bc142b4e9d5..998bef4649b 100644 --- a/.codeclimate.yml +++ b/.codeclimate.yml @@ -49,7 +49,7 @@ plugins: languages: ruby: javascript: - mass_threshold: 50 + mass_threshold: 81 exclude_patterns: - 'db/migrate/*' - 'app/controllers/idt/api/v2/appeals_controller.rb' @@ -122,3 +122,4 @@ exclude_patterns: - 'tmp/**/*' - 'app/assets/**/*' - 'client/test/data/camoQueueConfigData.js' + - 'client/app/intake/components/mockData/issueListProps.js' diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index 2a1f53c9cca..b7dd6014fb0 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -189,6 +189,8 @@ jobs: run: | ./ci-bin/capture-log "DB=etl bundle exec rake db:create db:schema:load db:migrate" ./ci-bin/capture-log "bundle exec rake db:create db:schema:load db:migrate" + ./ci-bin/capture-log "make -f Makefile.example external-db-create" + # added line to create external table(s) that are needed for tests # We don't want to seed DBs here because DatabaseCleaner just truncates it anyway. The setup_vacols # rake task needs to be run because it adds data to two tables that are ignored by DBCleaner @@ -329,16 +331,16 @@ jobs: - name: Install Node Dependencies run: ./ci-bin/capture-log "cd client && yarn install --frozen-lockfile" - - name: Danger - run: ./ci-bin/capture-log "bundle exec danger" - env: - DANGER_GITHUB_API_TOKEN: ${{ secrets.DANGER_GITHUB_API_TOKEN }} + # - name: Danger + # run: ./ci-bin/capture-log "bundle exec danger" + # env: + # DANGER_GITHUB_API_TOKEN: ${{ secrets.DANGER_GITHUB_API_TOKEN }} - name: Lint run: ./ci-bin/capture-log "bundle exec rake lint" if: ${{ always() }} - - name: Security - run: ./ci-bin/capture-log "bundle exec rake security" - if: ${{ always() }} + # - name: Security + # run: ./ci-bin/capture-log "bundle exec rake security" + # if: ${{ always() }} diff --git a/.reek.yml b/.reek.yml index 77455d0fd73..f1de4d08095 100644 --- a/.reek.yml +++ b/.reek.yml @@ -252,6 +252,7 @@ detectors: - Reporter#percent - SanitizedJsonConfiguration - ScheduleHearingTaskPager#sorted_tasks + - UpdatePOAConcern - VBMSCaseflowLogger#log - LegacyDocket UnusedParameters: diff --git a/Gemfile b/Gemfile index df9a7c2e660..8d6a02bb0cf 100644 --- a/Gemfile +++ b/Gemfile @@ -19,7 +19,7 @@ gem "browser" gem "business_time", "~> 0.9.3" gem "caseflow", git: "https://github.com/department-of-veterans-affairs/caseflow-commons", ref: "6377b46c2639248574673adc6a708d2568c6958c" gem "connect_mpi", git: "https://github.com/department-of-veterans-affairs/connect-mpi.git", ref: "a3a58c64f85b980a8b5ea6347430dd73a99ea74c" -gem "connect_vbms", git: "https://github.com/department-of-veterans-affairs/connect_vbms.git", ref: "98b1f9f8aa368189a59af74d91cb0aa4c55006af" +gem "connect_vbms", git: "https://github.com/department-of-veterans-affairs/connect_vbms.git", ref: "9807d9c9f0f3e3494a60b6693dc4f455c1e3e922" gem "console_tree_renderer", git: "https://github.com/department-of-veterans-affairs/console-tree-renderer.git", tag: "v0.1.1" gem "countries" gem "ddtrace" @@ -38,7 +38,7 @@ gem "moment_timezone-rails" gem "multiverse" gem "newrelic_rpm" gem "nokogiri", ">= 1.11.0.rc4" -gem "paper_trail", "~> 10" +gem "paper_trail", "~> 12.0" # Used to speed up reporting gem "parallel" # soft delete gem @@ -47,8 +47,8 @@ gem "paranoia", "~> 2.2" gem "pdf-forms" # Used in Caseflow Dispatch gem "pdfjs_viewer-rails", git: "https://github.com/senny/pdfjs_viewer-rails.git", ref: "a4249eacbf70175db63b57e9f364d0a9a79e2b43" 
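The swap from `database_cleaner` to `database_cleaner-active_record` in the test/development group above tracks database_cleaner 2.x, which split the ORM adapters into separate gems while leaving the `DatabaseCleaner` interface itself unchanged. A minimal sketch of the typical RSpec wiring, assuming the default transaction strategy; Caseflow's actual spec-support file is not part of this diff:

```ruby
# spec/support/database_cleaner.rb (hypothetical sketch, not taken from this PR)
# database_cleaner-active_record 2.x keeps the same DatabaseCleaner API,
# so an existing setup usually only needs the new require path.
require "database_cleaner/active_record"

RSpec.configure do |config|
  config.before(:suite) do
    # One truncation pass up front; per-example cleanup then runs in transactions.
    DatabaseCleaner.clean_with(:truncation)
  end

  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.start
  end

  config.after(:each) do
    DatabaseCleaner.clean
  end
end
```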
-#Used to build out PDF files on the backend -#https://github.com/pdfkit/pdfkit +# Used to build out PDF files on the backend +# https://github.com/pdfkit/pdfkit gem "pdfkit" gem "pg", platforms: :ruby # Application server: Puma @@ -56,11 +56,12 @@ gem "pg", platforms: :ruby # Discussion: https://github.com/18F/college-choice/issues/597#issuecomment-139034834 gem "puma", "5.6.4" gem "rack", "~> 2.2.6.2" -gem "rails", "5.2.4.6" +gem "rails", "5.2.8.1" # Used to colorize output for rake tasks gem "rainbow" # React gem "react_on_rails", "11.3.0" +gem "redis-mutex" gem "redis-namespace" gem "redis-rails", "~> 5.0.2" gem "request_store" @@ -98,7 +99,7 @@ group :test, :development, :demo do gem "capybara" gem "capybara-screenshot" gem "danger", "~> 6.2.2" - gem "database_cleaner" + gem "database_cleaner-active_record" gem "factory_bot_rails", "~> 5.2" gem "faker" gem "guard-rspec" diff --git a/Gemfile.lock b/Gemfile.lock index de2664ad49b..626387a6945 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -35,8 +35,8 @@ GIT GIT remote: https://github.com/department-of-veterans-affairs/connect_vbms.git - revision: 98b1f9f8aa368189a59af74d91cb0aa4c55006af - ref: 98b1f9f8aa368189a59af74d91cb0aa4c55006af + revision: 9807d9c9f0f3e3494a60b6693dc4f455c1e3e922 + ref: 9807d9c9f0f3e3494a60b6693dc4f455c1e3e922 specs: connect_vbms (1.3.0) httpclient (~> 2.8.0) @@ -86,48 +86,48 @@ GEM remote: https://rubygems.org/ specs: aasm (4.11.0) - actioncable (5.2.4.6) - actionpack (= 5.2.4.6) + actioncable (5.2.8.1) + actionpack (= 5.2.8.1) nio4r (~> 2.0) websocket-driver (>= 0.6.1) - actionmailer (5.2.4.6) - actionpack (= 5.2.4.6) - actionview (= 5.2.4.6) - activejob (= 5.2.4.6) + actionmailer (5.2.8.1) + actionpack (= 5.2.8.1) + actionview (= 5.2.8.1) + activejob (= 5.2.8.1) mail (~> 2.5, >= 2.5.4) rails-dom-testing (~> 2.0) - actionpack (5.2.4.6) - actionview (= 5.2.4.6) - activesupport (= 5.2.4.6) + actionpack (5.2.8.1) + actionview (= 5.2.8.1) + activesupport (= 5.2.8.1) rack (~> 2.0, >= 2.0.8) rack-test (>= 0.6.3) rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.0, >= 1.0.2) - actionview (5.2.4.6) - activesupport (= 5.2.4.6) + actionview (5.2.8.1) + activesupport (= 5.2.8.1) builder (~> 3.1) erubi (~> 1.4) rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.0, >= 1.0.3) - activejob (5.2.4.6) - activesupport (= 5.2.4.6) + activejob (5.2.8.1) + activesupport (= 5.2.8.1) globalid (>= 0.3.6) - activemodel (5.2.4.6) - activesupport (= 5.2.4.6) - activerecord (5.2.4.6) - activemodel (= 5.2.4.6) - activesupport (= 5.2.4.6) + activemodel (5.2.8.1) + activesupport (= 5.2.8.1) + activerecord (5.2.8.1) + activemodel (= 5.2.8.1) + activesupport (= 5.2.8.1) arel (>= 9.0) activerecord-import (1.0.2) activerecord (>= 3.2) activerecord-oracle_enhanced-adapter (5.2.8) activerecord (~> 5.2.0) ruby-plsql (>= 0.6.0) - activestorage (5.2.4.6) - actionpack (= 5.2.4.6) - activerecord (= 5.2.4.6) - marcel (~> 0.3.1) - activesupport (5.2.4.6) + activestorage (5.2.8.1) + actionpack (= 5.2.8.1) + activerecord (= 5.2.8.1) + marcel (~> 1.0.0) + activesupport (5.2.8.1) concurrent-ruby (~> 1.0, >= 1.0.2) i18n (>= 0.7, < 2) minitest (~> 5.1) @@ -198,7 +198,7 @@ GEM coderay (1.1.3) colored2 (3.1.2) colorize (0.8.1) - concurrent-ruby (1.1.8) + concurrent-ruby (1.2.2) connection_pool (2.2.3) cork (0.3.0) colored2 (~> 3.1) @@ -224,7 +224,11 @@ GEM no_proxy_fix octokit (~> 4.7) terminal-table (~> 1) - database_cleaner (1.7.0) + database_cleaner-active_record (2.1.0) + activerecord (>= 5.a) + database_cleaner-core (~> 2.0.0) + database_cleaner-core 
(2.0.1) + date (3.3.3) ddtrace (0.34.1) msgpack debase (0.2.4.1) @@ -278,7 +282,7 @@ GEM dry-logic (~> 1.0, >= 1.0.2) ecma-re-validator (0.2.1) regexp_parser (~> 1.2) - erubi (1.10.0) + erubi (1.12.0) execjs (2.7.0) factory_bot (5.2.0) activesupport (>= 4.2.0) @@ -308,8 +312,8 @@ GEM git (1.13.2) addressable (~> 2.8) rchardet (~> 1.8) - globalid (0.4.2) - activesupport (>= 4.2.0) + globalid (1.1.0) + activesupport (>= 5.0) govdelivery-tms (2.8.4) activesupport faraday @@ -339,7 +343,7 @@ GEM httpi (2.4.4) rack socksify - i18n (1.8.10) + i18n (1.14.1) concurrent-ruby (~> 1.0) i18n_data (0.10.0) icalendar (2.6.1) @@ -391,14 +395,16 @@ GEM logstasher (2.1.5) activesupport (>= 5.2) request_store - loofah (2.9.1) + loofah (2.21.3) crass (~> 1.0.2) - nokogiri (>= 1.5.9) + nokogiri (>= 1.12.0) lumberjack (1.0.13) - mail (2.7.1) + mail (2.8.1) mini_mime (>= 0.1.1) - marcel (0.3.3) - mimemagic (~> 0.3.2) + net-imap + net-pop + net-smtp + marcel (1.0.2) maruku (0.7.3) memory_profiler (0.9.14) meta_request (0.7.2) @@ -408,12 +414,9 @@ GEM mime-types (3.3) mime-types-data (~> 3.2015) mime-types-data (3.2019.1009) - mimemagic (0.3.10) - nokogiri (~> 1) - rake - mini_mime (1.1.0) - mini_portile2 (2.7.1) - minitest (5.14.4) + mini_mime (1.1.2) + mini_portile2 (2.8.4) + minitest (5.19.0) moment_timezone-rails (0.5.0) momentjs-rails (2.29.4.1) railties (>= 3.1) @@ -428,11 +431,20 @@ GEM neat (4.0.0) thor (~> 0.19) nenv (0.3.0) + net-imap (0.3.7) + date + net-protocol + net-pop (0.1.2) + net-protocol + net-protocol (0.2.1) + timeout + net-smtp (0.3.3) + net-protocol newrelic_rpm (6.5.0.357) - nio4r (2.5.8) + nio4r (2.5.9) no_proxy_fix (0.1.2) - nokogiri (1.13.1) - mini_portile2 (~> 2.7.0) + nokogiri (1.15.3) + mini_portile2 (~> 2.8.2) racc (~> 1.4) nori (2.6.0) notiffany (0.1.1) @@ -442,8 +454,8 @@ GEM faraday (>= 0.9) sawyer (~> 0.8.0, >= 0.5.3) open4 (1.3.4) - paper_trail (10.3.1) - activerecord (>= 4.2) + paper_trail (12.3.0) + activerecord (>= 5.2) request_store (~> 1.1) parallel (1.19.1) paranoia (2.4.2) @@ -467,38 +479,40 @@ GEM public_suffix (4.0.6) puma (5.6.4) nio4r (~> 2.0) - racc (1.6.0) - rack (2.2.6.2) + racc (1.7.1) + rack (2.2.6.4) rack-contrib (2.1.0) rack (~> 2.0) - rack-test (1.1.0) - rack (>= 1.0, < 3) - rails (5.2.4.6) - actioncable (= 5.2.4.6) - actionmailer (= 5.2.4.6) - actionpack (= 5.2.4.6) - actionview (= 5.2.4.6) - activejob (= 5.2.4.6) - activemodel (= 5.2.4.6) - activerecord (= 5.2.4.6) - activestorage (= 5.2.4.6) - activesupport (= 5.2.4.6) + rack-test (2.1.0) + rack (>= 1.3) + rails (5.2.8.1) + actioncable (= 5.2.8.1) + actionmailer (= 5.2.8.1) + actionpack (= 5.2.8.1) + actionview (= 5.2.8.1) + activejob (= 5.2.8.1) + activemodel (= 5.2.8.1) + activerecord (= 5.2.8.1) + activestorage (= 5.2.8.1) + activesupport (= 5.2.8.1) bundler (>= 1.3.0) - railties (= 5.2.4.6) + railties (= 5.2.8.1) sprockets-rails (>= 2.0.0) - rails-dom-testing (2.0.3) - activesupport (>= 4.2.0) + rails-dom-testing (2.1.1) + activesupport (>= 5.0.0) + minitest nokogiri (>= 1.6) rails-erd (1.6.0) activerecord (>= 4.2) activesupport (>= 4.2) choice (~> 0.2.0) ruby-graphviz (~> 1.2) - rails-html-sanitizer (1.3.0) - loofah (~> 2.3) - railties (5.2.4.6) - actionpack (= 5.2.4.6) - activesupport (= 5.2.4.6) + rails-html-sanitizer (1.6.0) + loofah (~> 2.21) + nokogiri (~> 1.14) + railties (5.2.8.1) + actionpack (= 5.2.8.1) + activesupport (= 5.2.8.1) method_source rake (>= 0.8.7) thor (>= 0.19.0, < 2.0) @@ -523,6 +537,10 @@ GEM redis-activesupport (5.0.4) activesupport (>= 3, < 6) redis-store (>= 1.3, < 2) + 
redis-classy (2.4.1) + redis-namespace (~> 1.0) + redis-mutex (4.0.2) + redis-classy (~> 2.0) redis-namespace (1.6.0) redis (>= 3.0.4) redis-rack (2.0.4) @@ -652,9 +670,9 @@ GEM sprockets (3.7.2) concurrent-ruby (~> 1.0) rack (> 1, < 3) - sprockets-rails (3.2.2) - actionpack (>= 4.0) - activesupport (>= 4.0) + sprockets-rails (3.4.2) + actionpack (>= 5.2) + activesupport (>= 5.2) sprockets (>= 3.0.0) sql_tracker (1.3.2) stringex (2.8.5) @@ -670,6 +688,7 @@ GEM thread_safe (0.3.6) tilt (2.0.8) timecop (0.9.1) + timeout (0.4.0) tty-tree (0.3.0) tzinfo (1.2.10) thread_safe (~> 0.1) @@ -694,7 +713,7 @@ GEM crack (>= 0.3.2) hashdiff (>= 0.4.0, < 2.0.0) webrick (1.7.0) - websocket-driver (0.7.3) + websocket-driver (0.7.6) websocket-extensions (>= 0.1.0) websocket-extensions (0.1.5) xmldsig (0.3.2) @@ -738,7 +757,7 @@ DEPENDENCIES console_tree_renderer! countries danger (~> 6.2.2) - database_cleaner + database_cleaner-active_record ddtrace debase derailed_benchmarks @@ -766,7 +785,7 @@ DEPENDENCIES multiverse newrelic_rpm nokogiri (>= 1.11.0.rc4) - paper_trail (~> 10) + paper_trail (~> 12.0) parallel paranoia (~> 2.2) pdf-forms @@ -778,11 +797,12 @@ DEPENDENCIES pry-byebug (~> 3.9) puma (= 5.6.4) rack (~> 2.2.6.2) - rails (= 5.2.4.6) + rails (= 5.2.8.1) rails-erd rainbow rb-readline react_on_rails (= 11.3.0) + redis-mutex redis-namespace redis-rails (~> 5.0.2) request_store diff --git a/MAC_M1.md b/MAC_M1.md index 603c98e0b71..571b340cba5 100644 --- a/MAC_M1.md +++ b/MAC_M1.md @@ -14,6 +14,10 @@ Frequently Asked Question: Apple Silicon processors use a different architecture (arm64/aarch64) than Intel processors (x86_64). Oracle, which is used for the VACOLS database, does not have binaries to run any of their database tools natively on arm64 for MacOS. Additionally, the Ruby gems `therubyracer` and `jshint` require the library v8@3.15, which can also only be compiled and installed on x86_64 processors. To work around this we use Rosetta to emulate x86_64 processors in the terminal, installing most of the Caseflow dependencies via the x86_64 version of Homebrew. It is important that while setting up your environment, you ensure you are *in the correct terminal type* and *in the correct directory* so that you do not install or compile a dependency with the wrong architecture. +2. I am running into errors! Where can I go for help? + +See the Installation Workarounds section for common or previously relevant workarounds that may help. Additionally, join the #bid_appeals_mac_support channel in Slack (or ask your scrum master to add you). You can search that channel to see if your issue has been previously discussed or post what step you are having a problem on and what you've done so far. + ***Ensure command line tools are installed via Self Service Portal prior to starting*** ***Follow these instructions as closely as possible. If a folder is specified for running terminal commands, ensure you are in that directory prior to running the command(s). If you can't complete a step, ask for help in the #bid_appeals_mac_support channel of the Benefits Integrated Delivery (BID) Slack workspace.*** @@ -57,7 +61,7 @@ Install UTM and VACOLS VM 6. Select the “Play” button when it pops up in UTM 7. Leave this running in the background. If you close the window, you can open it back up by repeating steps 5-7 -Chromedriver Installation +Chromedriver, PDFtk Server, and wkhtmltox Installation --- 1. Open terminal and run * `brew install --cask chromedriver` @@ -70,15 +74,20 @@ developer” 6. 
Select “Yes” from pop up 7. Reopen terminal and once again run `chromedriver –version` 8. Select “Open” +9. Download and install from this [link](https://www.pdflabs.com/tools/pdftk-the-pdf-toolkit/pdftk_server-2.02-mac_osx-10.11-setup.pkg). When you receive a security warning, follow steps 3-6 for PDFtk server +10. Download this [file](https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-2/wkhtmltox-0.12.6-2.macos-cocoa.pkg) and move through the prompts. When you receive a security warning, follow steps 3-6 for wkhtmltox -Note: you may need to run ```sudo spctl --global-disable``` in the terminal if you have issues with security - -Install PDFtk Server and wkhtmltox +Oracle “instantclient” Files --- -1. Download and install from this [link](https://www.pdflabs.com/tools/pdftk-the-pdf-toolkit/pdftk_server-2.02-mac_osx-10.11-setup.pkg) -2. Download this [file](https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-2/wkhtmltox-0.12.6-2.macos-cocoa.pkg) and move through the prompts +1. Download these DMG files + * [instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg) + * [instantclient-sqlplus-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-sqlplus-macos.x64-19.8.0.0.0dbru.dmg) + * [Instantclient-sdk-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-sdk-macos.x64-19.8.0.0.0dbru.dmg) +2. After downloading, double click on one of the folders and follow the instructions in INSTALL_IC_README.txt to copy the libraries -Note: you may need to run ```sudo spctl --global-disable``` in the terminal if you have issues with security +Postgres Download +--- +1. Download and install from this [link](https://github.com/PostgresApp/PostgresApp/releases/download/v2.5.8/Postgres-2.5.8-14.dmg) Configure x86_64 Homebrew --- @@ -90,8 +99,6 @@ Run the below commands **from your home directory** * ```curl -L https://github.com/Homebrew/brew/tarball/master | tar xz --strip 1 -C homebrew``` 3. If you get a chdir error, run * ``mkdir homebrew && curl -L https://github.com/Homebrew/brew/tarball/master | tar xz --strip 1 -C homebrew`` -4. Using sudo, move the homebrew directory to /usr/local/ - * ```sudo mv homebrew /usr/local/homebrew``` Rosetta --- @@ -102,50 +109,23 @@ Rosetta 4. Select “Open using Rosetta” * Note: you can copy the standard terminal executable to your desktop and enable Rosetta on that, so that you don’t need to disable rosetta on the default terminal once Caseflow setup is complete -Oracle “instantclient” Files ---- -1. Download these DMG files - * [instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg) - * [instantclient-sqlplus-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-sqlplus-macos.x64-19.8.0.0.0dbru.dmg) - * [Instantclient-sdk-macos.x64-19.8.0.0.0dbru.dmg](https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-sdk-macos.x64-19.8.0.0.0dbru.dmg) -2. After downloading, double click on one of the folders and follow the instructions in INSTALL_IC_README.txt to copy the libraries - -Postgres Download ---- -1. 
Download and install from this [link](https://github.com/PostgresApp/PostgresApp/releases/download/v2.5.8/Postgres-2.5.8-14.dmg) - -OpenSSL ---- -1. Download openssl@1.1 and openssl@3 from this [link](https://boozallen.sharepoint.com/teams/VABID/appeals/Documents/Forms/AllItems.aspx?id=%2Fteams%2FVABID%2Fappeals%2FDocuments%2FDevelopment%2FDeveloper%20Setup%20Resources%2FM1%20Mac%20Developer%20Setup&viewid=8a8eaf3e%2D2c12%2D4c87%2Db95f%2D4eab3428febd) -2. Open “Finder” and find the two folders under “Downloads” -3. Extract the `.tar.gz` files -4. In each of the extracted folders: - 1. Navigate to the `/usr/local/homebrew/Cellar` subfolder - 2. Copy the openssl folder to your local machine's `/usr/local/homebrew/Cellar` folder - 3. If the folder `Cellar` in `/usr/local/homebrew` does not exist, create it with `mkdir /usr/local/homebrew/Cellar` - * Note: moving these folders can be done using finder or a terminal -5. Run command (from a rosetta terminal) - 1. `brew link --force openssl@1.1` - 2. If the one above doesn’t work run: `brew link openssl@1.1 --force` - * Note: don't link openssl@3 unless you run into issues farther in the setup - Modify your .zshrc File --- 1. Run command `open ~/.zshrc` 2. Add the following lines, if any of these are already set make sure to comment previous settings: ``` -export PATH=/usr/local/homebrew/bin:${PATH} -eval "$(/usr/local/homebrew/bin/rbenv init -)" -eval "$(/usr/local/homebrew/bin/nodenv init -)" -eval "$(/usr/local/homebrew/bin/pyenv init --path)" +export PATH=~/homebrew/bin:${PATH} +eval "$(~/homebrew/bin/rbenv init -)" +eval "$(~/homebrew/bin/nodenv init -)" +eval "$(~/homebrew/bin/pyenv init --path)" # Add Postgres environment variables for CaseFlow export POSTGRES_HOST=localhost export POSTGRES_USER=postgres export POSTGRES_PASSWORD=postgres export NLS_LANG=AMERICAN_AMERICA.UTF8 -export FREEDESKTOP_MIME_TYPES_PATH=/usr/local/homebrew/share/mime/packages/freedesktop.org.xml export OCI_DIR=~/Downloads/instantclient_19_8 +export FREEDESKTOP_MIME_TYPES_PATH=~/homebrew/share/mime/packages/freedesktop.org.xml export OCI_DIR=~/Downloads/instantclient_19_8 ``` 3. Save file @@ -156,40 +136,40 @@ Run dev setup scripts in Caseflow repo --- **VERY IMPORTANT NOTE: The below commands must be run *in a Rosetta terminal* until you reach the 'Running Caseflow' section** -*Script 1* +***Script 1*** -1. Enter a **Rosetta** terminal and ensure you are in the directory you cloned Caseflow repo into (~/dev/appeals/caseflow) and run commands: +1. Open a **new Rosetta** terminal and ensure you are in the directory you cloned the Caseflow repo into (~/dev/appeals/caseflow) and run commands: 1. ```git checkout grant/setup-m1``` 2. ```./scripts/dev_env_setup_step1.sh``` * If this fails, double check your .zshrc file to ensure your PATH has only the x86_64 brew -*Script 2* +Note: If you run into errors installing any versions of openssl, see the "Installation Workarounds" section at the bottom of this document + +***Script 2*** -1. Open a **Rosetta** terminal and navigate to /usr/local, run the command ```sudo spctl --global-disable``` 2. In the **Rosetta** terminal, install pyenv and the required python2 version: 1. `brew install pyenv` - 2. `pyenv install 2.7.18` - 3. In the caseflow directory, run `pyenv local 2.7.18` to set the version + 2. `pyenv rehash` + 3. `pyenv install 2.7.18` + 4. In the caseflow directory, run `pyenv local 2.7.18` to set the version 3. In the **Rosetta** terminal navigate to caseflow folder: - 1. 
set ```RUBY_CONFIGURE_OPTS="--with-openssl-dir=/usr/local/homebrew/Cellar/openssl@1.1"``` - 2. run `rbenv install 2.7.3` - 3. run `gem install pg:1.1.4 -- --with-pg-config=/Applications/Postgres.app/Contents/Versions/latest/bin/pg_config` - 4. Install v8@3.15 by doing the following (these steps assume that vi/vim is the default editor): - 1. run `brew edit v8@3.15` + 1. run `rbenv install $(cat .ruby-version)` + 2. run `rbenv rehash` + 3. run `gem install bundler -v $(grep -A 1 "BUNDLED WITH" Gemfile.lock | tail -n 1)` + 4. run `gem install pg:1.1.4 -- --with-pg-config=/Applications/Postgres.app/Contents/Versions/latest/bin/pg_config` + 5. Install v8@3.15 by doing the following (these steps assume that vi/vim is the default editor): + 1. run `HOMEBREW_NO_INSTALL_FROM_API=1 brew edit v8@3.15` 2. go to line 21 in the editor by typing `:21` Note: the line being removed is `disable! date: "2023-06-19", because: "depends on Python 2 to build"` 3. delete the line by pressing `d` twice 4. save and quit by typing `:x` - 5. Configure build opts for gem `therubyracer`: + 5. run `HOMEBREW_NO_INSTALL_FROM_API=1 brew install v8@3.15` + 6. Configure build opts for gem `therubyracer`: 1. `bundle config build.libv8 --with-system-v8` 2. `bundle config build.therubyracer --with-v8-dir=$(brew --prefix v8@3.15)` - 6. run ```./scripts/dev_env_setup_step2.sh``` - If you get a permission error while running gem install or bundle install, **do not run using sudo.** - Set the permissions back to you for every directory under /.rbenv - * Enter command: `sudo chown -R /Users//.rbenv` - * For example, if my name is Eli Brown, the command will be: - `sudo chown –R elibrown /Users/elibrown/.rbenv` -4. Optional: If there are no errors messages, run `bundle install` to ensure all gems are installed + 7. run ```./scripts/dev_env_setup_step2.sh``` + If you get a permission error while running gem install or bundle install, something went wrong with your rbenv install which needs to be fixed. +4. If there are no errors messages, run `bundle install` to ensure all gems are installed Running Caseflow --- @@ -197,12 +177,12 @@ Running Caseflow 1. Once your installation of all gems is complete, switch back to a standard MacOS terminal: 1. open your ~/.zshrc file - 2. comment the line `export PATH=/usr/local/homebrew/bin:$PATH` + 2. comment the line `export PATH=~/homebrew/bin:$PATH` 3. uncomment the line `export PATH=/opt/homebrew/bin:$PATH` 4. add the line `export PATH=$HOME/.nodenv/shims:$HOME/.rbenv/shims:$HOME/.pyenv/shims:$PATH` 5. comment the lines `eval "$({binary} init -)"` for rbenv, pyenv, and nodenv if applicable - 6. if you added the line `eval $(/usr/local/homebrew/bin/brew shellenv)` after installing x86_64 homebrew, comment it out -2. Open a terminal verify: + 6. if you added the line `eval $(~/homebrew/bin/brew shellenv)` after installing x86_64 homebrew, comment it out +2. Open a new terminal and verify: 1. that you are on arm64 by doing `arch` and checking the output 2. that you are using arm64 brew by doing `which brew` and ensuring the output is `/opt/homebrew/bin/brew` 3. Open caseflow in VSCode (optional), or navigate to the caseflow directory in your terminal and: @@ -234,7 +214,33 @@ To launch caseflow after a machine restart: Note: It takes several minutes for the VACOLS VM to go through its startup and launch the Oracle DB service, and about a minute for the Postgres DB to initialize after running `make up-m1`. 
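Most failures in Script 2 come from a gem that compiled against the wrong architecture or the wrong OpenSSL. Before resorting to the workarounds below, a small throwaway Ruby check (hypothetical, not shipped with Caseflow or this PR) run from a Rosetta terminal in the caseflow directory can confirm what the rbenv-installed Ruby actually linked against:

```ruby
#!/usr/bin/env ruby
# toolchain_check.rb (hypothetical helper): confirms the interpreter is the
# x86_64 build and that the native gems Script 2 compiles are present.
require "openssl"

puts "ruby:    #{RUBY_VERSION} (#{RUBY_PLATFORM})" # expect x86_64-darwin under Rosetta
puts "openssl: #{OpenSSL::OPENSSL_VERSION}"

%w[pg libv8 therubyracer].each do |name|
  spec = Gem::Specification.find_by_name(name)
  puts format("%-12s %s", name, spec.version)
rescue Gem::MissingSpecError
  puts format("%-12s NOT INSTALLED", name)
end
```

If `RUBY_PLATFORM` reports arm64 here, the shell resolved the wrong Ruby; recheck the PATH and rbenv lines in your .zshrc before rerunning the script.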
--- +# Installation Workarounds + +OpenSSL +--- +**When installing rbenv, nodenv, or pyenv, both openssl libraries should install as dependencies. _Only follow the below instructions if you have problems with openssl@3 or openssl@1.1 not compiling_.** + +1. Download openssl@1.1 and openssl@3 from this [link](https://boozallen.sharepoint.com/teams/VABID/appeals/Documents/Forms/AllItems.aspx?id=%2Fteams%2FVABID%2Fappeals%2FDocuments%2FDevelopment%2FDeveloper%20Setup%20Resources%2FM1%20Mac%20Developer%20Setup&viewid=8a8eaf3e%2D2c12%2D4c87%2Db95f%2D4eab3428febd) +2. Open “Finder” and find the two folders under “Downloads” +3. Extract the `.tar.gz` or `.zip` archives +4. In each of the extracted folders: + 1. Navigate to the `~/homebrew/Cellar` subfolder + 2. Copy the openssl folder to your local machine's `~/homebrew/Cellar` folder + 3. If the folder `Cellar` in `~/homebrew` does not exist, create it with `mkdir ~/homebrew/Cellar` + * Note: moving these folders can be done using finder or a terminal +5. Run command (from a rosetta terminal) + 1. `brew link --force openssl@1.1` + 2. If the one above doesn’t work run: `brew link openssl@1.1 --force` + * Note: don't link openssl@3 unless you run into issues farther in the setup + +Installing Ruby via Rbenv +--- +If you are getting errors for rbenv being unable to find a usable version of openssl, run these commands prior to running the second dev setup script: +1. `brew install openssl@1.1` +2. `export RUBY_CONFIGURE_OPTS="--with-openssl-dir=/usr/local/homebrew/Cellar/openssl@1.1"` +Running Caseflow +--- The following steps are an alternative to step 7 of the Running Caseflow section in the event that you absolutely cannot get those commands to work: 1. In caseflow, run * a. `make down` diff --git a/Makefile.example b/Makefile.example index 6b6ac721986..45cc6e80d09 100644 --- a/Makefile.example +++ b/Makefile.example @@ -156,20 +156,60 @@ audit: ## Create caseflow_audit schema, tables, and triggers in postgres bundle exec rails r db/scripts/audit/tables/create_vbms_distributions_audit.rb bundle exec rails r db/scripts/audit/tables/create_vbms_distribution_destinations_audit.rb bundle exec rails r db/scripts/audit/tables/create_vbms_uploaded_documents_audit.rb + bundle exec rails r db/scripts/audit/tables/create_priority_end_product_sync_queue_audit.rb bundle exec rails r db/scripts/audit/functions/add_row_to_appeal_states_audit_table_function.rb bundle exec rails r db/scripts/audit/functions/add_row_to_vbms_communication_packages_audit_table_function.rb bundle exec rails r db/scripts/audit/functions/add_row_to_vbms_distributions_audit_table_function.rb bundle exec rails r db/scripts/audit/functions/add_row_to_vbms_distribution_destinations_audit_table_function.rb bundle exec rails r db/scripts/audit/functions/add_row_to_vbms_uploaded_documents_audit_table_function.rb + bundle exec rails r db/scripts/audit/functions/add_row_to_priority_end_product_sync_queue_audit_table_function.rb bundle exec rails r db/scripts/audit/triggers/create_appeal_states_audit_trigger.rb bundle exec rails r db/scripts/audit/triggers/create_vbms_communication_packages_audit_trigger.rb bundle exec rails r db/scripts/audit/triggers/create_vbms_distributions_audit_trigger.rb bundle exec rails r db/scripts/audit/triggers/create_vbms_distribution_destinations_audit_trigger.rb bundle exec rails r db/scripts/audit/triggers/create_vbms_uploaded_documents_audit_trigger.rb + bundle exec rails r db/scripts/audit/triggers/create_priority_end_product_sync_queue_audit_trigger.rb 
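The audit targets above run plain `rails r` one-liners that issue raw SQL against the `caseflow_audit` schema. The new priority_end_product_sync_queue scripts themselves are not shown in this diff; the sketch below only illustrates the usual shape of the trigger script, with the trigger and function names inferred from the script file names rather than copied from the PR:

```ruby
# db/scripts/audit/triggers/create_priority_end_product_sync_queue_audit_trigger.rb
# (illustrative sketch; the real script lives elsewhere in this PR)
conn = ActiveRecord::Base.connection

conn.execute(<<~SQL)
  DROP TRIGGER IF EXISTS priority_end_product_sync_queue_audit_trigger
    ON public.priority_end_product_sync_queue;

  CREATE TRIGGER priority_end_product_sync_queue_audit_trigger
  AFTER INSERT OR UPDATE OR DELETE ON public.priority_end_product_sync_queue
  FOR EACH ROW
  EXECUTE PROCEDURE add_row_to_priority_end_product_sync_queue_audit_table();
SQL
```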
audit-remove: ## Remove caseflow_audit schema, tables and triggers in postgres bundle exec rails r db/scripts/audit/remove_caseflow_audit_schema.rb +# The external-db make commands create/remove replicas (for local environment only) of external db tables that exist in Prod +# These tables should not be included as part of migrations +external-db-create: ## Creates external_vbms_ext_claim table + bundle exec rails r db/scripts/external/create_vbms_ext_claim_table.rb + +external-db-remove: ## Remove external_vbms_ext_claim table + bundle exec rails r db/scripts/external/remove_vbms_ext_claim_table.rb + +# This needs to be manually run after make reset/migrate in order for local tests involving external tables to pass. +# Otherwise the caseflow_certification_test schema will not create these tables and will error out. +external-db-create-test: ## Creates table in caseflow_certification_test DB for local RSPEC tests + bundle exec rails r -e test db/scripts/external/create_vbms_ext_claim_table.rb + +remove-vbms-ext-claim-seeds: ## Drops audit tables, removes all PriorityEndProductSyncQueue, BatchProcess, and seed-vbms-ext-claim records, then rebuilds audit tables + make audit-remove + make external-db-create + bundle exec rails r db/scripts/external/remove_vbms_ext_claim_seeds.rb + make audit + +reseed-vbms-ext-claim: remove-vbms-ext-claim-seeds seed-vbms-ext-claim ## Re-seeds database with records created from seed-vbms-ext-claim + +# Add trigger to vbms_ext_claim to populate pepsq table +add-populate-pepsq-trigger: + bundle exec rails r db/scripts/add_pepsq_populate_trigger_to_vbms_ext_claim.rb + +# Add trigger to vbms_ext_claim to populate pepsq table +add-populate-pepsq-trigger-test: + bundle exec rails r -e test db/scripts/add_pepsq_populate_trigger_to_vbms_ext_claim.rb + +# Remove populate pepsq trigger from vbms_ext_claim table +drop-populate-pepsq-trigger: + bundle exec rails r db/scripts/drop_pepsq_populate_trigger_from_vbms_ext_claim.rb + +# Remove populate pepsq trigger from vbms_ext_claim table +drop-populate-pepsq-trigger-test: + bundle exec rails r -e test db/scripts/drop_pepsq_populate_trigger_from_vbms_ext_claim.rb + c: ## Start rails console bundle exec rails console @@ -188,7 +228,7 @@ db-migrate: ## Migrate main Caseflow db db-rollback: ## Rollback main Caseflow db bundle exec rake db:rollback -migrate: etl-migrate etl-test-prepare db-migrate ## Migrate all Rails databases +migrate: external-db-remove etl-migrate etl-test-prepare db-migrate ## Migrate all non-external Rails databases rollback: etl-rollback db-rollback ## Rollback all Rails databases @@ -199,9 +239,14 @@ reset: reset-dbs seed-dbs enable-feature-flags ## Resets databases and enable fe reset-dbs: ## Resets Caseflow and ETL database schemas make audit-remove + make external-db-remove DB=etl bundle exec rake db:drop db:create db:schema:load bundle exec rake db:drop db:create db:schema:load make audit + make external-db-create + +seed-vbms-ext-claim: ## Seed only vbms_ext_claim + bundle exec rake db:seed:vbms_ext_claim seed-dbs: ## Seed all databases bundle exec rake local:vacols:seed diff --git a/app/controllers/api/v1/va_notify_controller.rb b/app/controllers/api/v1/va_notify_controller.rb index b8d18399b27..92922facc9b 100644 --- a/app/controllers/api/v1/va_notify_controller.rb +++ b/app/controllers/api/v1/va_notify_controller.rb @@ -7,41 +7,19 @@ class Api::V1::VaNotifyController < Api::ApplicationController # # Response: Update corresponding Notification status def notifications_update - if 
required_params[:notification_type] == "email" - email_update - elsif required_params[:notification_type] == "sms" - sms_update - end + send "#{required_params[:notification_type]}_update" end private - # Purpose: Log error in Rails logger and gives 500 error - # - # Params: Notification type string, either "email" or "SMS" - # - # Response: json error message with uuid and 500 error - def log_error(notification_type) - uuid = SecureRandom.uuid - error_msg = "An #{notification_type} notification with id #{required_params[:id]} could not be found. " \ - "Error ID: #{uuid}" - Rails.logger.error(error_msg) - render json: { message: error_msg }, status: :internal_server_error - end - # Purpose: Finds and updates notification if type is email # # Params: Params content can be found at https://vajira.max.gov/browse/APPEALS-21021 # # Response: Update corresponding email Notification status def email_update - # find notification through external id - notif = Notification.find_by(email_notification_external_id: required_params[:id]) - # log external id if notification doesn't exist - return log_error(required_params[:notification_type]) unless notif + redis.set("email_update:#{required_params[:id]}:#{required_params[:status]}", 0) - # update notification if it exists - notif.update!(email_notification_status: required_params[:status]) render json: { message: "Email notification successfully updated: ID " + required_params[:id] } end @@ -51,13 +29,8 @@ def email_update # # Response: Update corresponding SMS Notification status def sms_update - # find notification through external id - notif = Notification.find_by(sms_notification_external_id: required_params[:id]) - # log external id if notification doesn't exist - return log_error(required_params[:notification_type]) unless notif + redis.set("sms_update:#{required_params[:id]}:#{required_params[:status]}", 0) - # update notification if it exists - notif.update!(sms_notification_status: params[:status]) render json: { message: "SMS notification successfully updated: ID " + required_params[:id] } end @@ -66,4 +39,8 @@ def required_params { id: id_param, notification_type: notification_type_param, status: status_param } end + + def redis + @redis ||= Redis.new(url: Rails.application.secrets.redis_url_cache) + end end diff --git a/app/controllers/appeals_controller.rb b/app/controllers/appeals_controller.rb index 17327609881..db34eaa27fc 100644 --- a/app/controllers/appeals_controller.rb +++ b/app/controllers/appeals_controller.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true +# rubocop:disable Metrics/ClassLength class AppealsController < ApplicationController + include UpdatePOAConcern before_action :react_routed before_action :set_application, only: [:document_count, :power_of_attorney, :update_power_of_attorney] # Only whitelist endpoints VSOs should have access to. 
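With the Api::V1::VaNotifyController changes above, the webhook no longer updates Notification rows inline; it only records `email_update:<external_id>:<status>` and `sms_update:<external_id>:<status>` keys in Redis. Whatever drains those keys is outside the hunks shown here. A hypothetical consumer, reusing the finders the old controller code called inline, might look like this (the job name and scheduling are assumptions; only the key format and Redis URL come from the controller):

```ruby
# Hypothetical background consumer, not part of the hunks in this diff.
class ProcessNotificationStatusUpdatesJob < ApplicationJob
  def perform
    redis = Redis.new(url: Rails.application.secrets.redis_url_cache)

    # Keys are written as "<type>_update:<external_id>:<status>" by the controller.
    redis.scan_each(match: "*_update:*") do |key|
      kind, external_id, status = key.split(":", 3)

      case kind
      when "email_update"
        Notification.find_by(email_notification_external_id: external_id)
          &.update!(email_notification_status: status)
      when "sms_update"
        Notification.find_by(sms_notification_external_id: external_id)
          &.update!(sms_notification_status: status)
      end

      redis.del(key)
    end
  end
end
```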
@@ -48,6 +50,7 @@ def show_case_list end end + # rubocop:disable Metrics/AbcSize, Metrics/MethodLength, Layout/LineLength def fetch_notification_list appeals_id = params[:appeals_id] respond_to do |format| @@ -64,7 +67,7 @@ def fetch_notification_list pdf = PdfExportService.create_and_save_pdf("notification_report_pdf_template", appeal) send_data pdf, filename: "Notification Report " + appeals_id + " " + date + ".pdf", type: "application/pdf", disposition: :attachment else - raise ActionController::RoutingError.new('Appeal Not Found') + fail ActionController::RoutingError, "Appeal Not Found" end rescue StandardError => error uuid = SecureRandom.uuid @@ -74,13 +77,14 @@ def fetch_notification_list end end format.csv do - raise ActionController::ParameterMissing.new('Bad Format') + fail ActionController::ParameterMissing, "Bad Format" end format.html do - raise ActionController::ParameterMissing.new('Bad Format') + fail ActionController::ParameterMissing, "Bad Format" end end end + # rubocop:enable Metrics/AbcSize, Metrics/MethodLength, Layout/LineLength def document_count doc_count = EFolderService.document_count(appeal.veteran_file_number, current_user) @@ -92,26 +96,22 @@ def document_count handle_non_critical_error("document_count", error) end + # series_id is lowercase, no curly braces because it comes from url + def document_lookup + series_id = "{#{params[:series_id]}}".upcase + document = Document.find_by(series_id: series_id, file_number: appeal.veteran_file_number) + + document ||= VBMSService.fetch_document_series_for(appeal).map(&:series_id).include?(series_id) + + render json: { document_presence: document.present? } + end + def power_of_attorney render json: power_of_attorney_data end def update_power_of_attorney - clear_poa_not_found_cache - if cooldown_period_remaining > 0 - render json: { - alert_type: "info", - message: "Information is current at this time. Please try again in #{cooldown_period_remaining} minutes", - power_of_attorney: power_of_attorney_data - } - else - message, result, status = update_or_delete_power_of_attorney! - render json: { - alert_type: result, - message: message, - power_of_attorney: (status == "updated") ? power_of_attorney_data : {} - } - end + update_poa_information(appeal) rescue StandardError => error render_error(error) end @@ -248,7 +248,7 @@ def review_removed_message end def review_withdrawn_message - "You have successfully withdrawn a review." + COPY::CLAIM_REVIEW_WITHDRAWN_MESSAGE end def withdrawn_issues @@ -284,6 +284,7 @@ def mst_pact_changes? end # format MST/PACT edit success banner message + # rubocop:disable Layout/LineLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity, Metrics/MethodLength def mst_and_pact_edited_issues # list of edit counts mst_added = 0 @@ -292,7 +293,7 @@ def mst_and_pact_edited_issues pact_removed = 0 # get edited issues from params and reject new issues without id if !appeal.is_a?(LegacyAppeal) - existing_issues = params[:request_issues].reject { |i| i[:request_issue_id].nil? } + existing_issues = params[:request_issues].reject { |iss| iss[:request_issue_id].nil? 
} # get added issues new_issues = request_issues_update.after_issues - request_issues_update.before_issues @@ -302,7 +303,7 @@ def mst_and_pact_edited_issues # calculate edits existing_issues.each do |issue_edit| # find the original issue and compare MST/PACT changes - before_issue = request_issues_update.before_issues.find { |i| i.id == issue_edit[:request_issue_id].to_i } + before_issue = request_issues_update.before_issues.find { |b_issue| b_issue.id == issue_edit[:request_issue_id].to_i } # increment edit counts if they meet the criteria for added/removed mst_added += 1 if issue_edit[:mst_status] != before_issue.mst_status && issue_edit[:mst_status] @@ -338,8 +339,10 @@ def mst_and_pact_edited_issues message.flatten end + # rubocop:enable Layout/LineLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity, Metrics/MethodLength # create MST/PACT message for added/removed issues + # rubocop:disable Layout/LineLength, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity def create_mst_pact_message_for_new_and_removed_issues(issues, type) special_issue_message = [] # check if any added/removed issues have MST/PACT and get the count @@ -355,40 +358,43 @@ def create_mst_pact_message_for_new_and_removed_issues(issues, type) special_issue_message end + # rubocop:enable Layout/LineLength, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity # check if there is a change in mst/pact on legacy issue # if there is a change, creat an issue update task + # rubocop:disable Metrics/AbcSize, Metrics/MethodLength def legacy_mst_pact_updates legacy_issue_params[:request_issues].each do |current_issue| - issue = appeal.issues.find { |i| i.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } + issue = appeal.issues.find { |iss| iss.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } # Check for changes in mst/pact status - if issue.mst_status != current_issue[:mst_status] || issue.pact_status != current_issue[:pact_status] - # If there is a change : - # Create issue_update_task to populate casetimeline if there is a change - create_legacy_issue_update_task(issue, current_issue) - - # Grab record from Vacols database to issue. - # When updating an Issue, method in IssueMapper and IssueRepo requires the attrs show below in issue_attrs:{} - record = VACOLS::CaseIssue.find_by(isskey: appeal.vacols_id, issseq: current_issue[:vacols_sequence_id]) - Issue.update_in_vacols!( - vacols_id: appeal.vacols_id, - vacols_sequence_id: current_issue[:vacols_sequence_id], - issue_attrs: { - mst_status: current_issue[:mst_status] ? "Y" : "N", - pact_status: current_issue[:pact_status] ? "Y" : "N", - program: record[:issprog], - issue: record[:isscode], - level_1: record[:isslev1], - level_2: record[:isslev2], - level_3: record[:isslev3] - } - ) - end + next unless issue.mst_status != current_issue[:mst_status] || issue.pact_status != current_issue[:pact_status] + + # If there is a change : + # Create issue_update_task to populate casetimeline if there is a change + create_legacy_issue_update_task(issue, current_issue) + + # Grab record from Vacols database to issue. + # When updating an Issue, method in IssueMapper and IssueRepo requires the attrs show below in issue_attrs:{} + record = VACOLS::CaseIssue.find_by(isskey: appeal.vacols_id, issseq: current_issue[:vacols_sequence_id]) + Issue.update_in_vacols!( + vacols_id: appeal.vacols_id, + vacols_sequence_id: current_issue[:vacols_sequence_id], + issue_attrs: { + mst_status: current_issue[:mst_status] ? 
"Y" : "N", + pact_status: current_issue[:pact_status] ? "Y" : "N", + program: record[:issprog], + issue: record[:isscode], + level_1: record[:isslev1], + level_2: record[:isslev2], + level_3: record[:isslev3] + } + ) end set_flash_mst_edit_message render json: { issues: json_issues }, status: :ok end + # rubocop:enable Metrics/AbcSize, Metrics/MethodLength def json_issues appeal.issues.map do |issue| @@ -398,14 +404,14 @@ def json_issues def legacy_issues_with_updated_mst_pact_status mst_edited = legacy_issue_params[:request_issues].find_all do |current_issue| - issue = appeal.issues.find { |i| i.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } + issue = appeal.issues.find { |iss| iss.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } issue.mst_status != current_issue[:mst_status] end pact_edited = legacy_issue_params[:request_issues].find_all do |current_issue| - issue = appeal.issues.find { |i| i.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } + issue = appeal.issues.find { |iss| iss.vacols_sequence_id == current_issue[:vacols_sequence_id].to_i } issue.pact_status != current_issue[:pact_status] end - {mst_edited: mst_edited, pact_edited: pact_edited} + { mst_edited: mst_edited, pact_edited: pact_edited } end def legacy_issue_params @@ -432,6 +438,8 @@ def create_params legacy_issue_params.merge(vacols_id: appeal.vacols_id) end + # rubocop:disable Metrics/AbcSize, Metrics/MethodLength + # :reek:FeatureEnvy def create_legacy_issue_update_task(before_issue, current_issue) user = RequestStore[:current_user] @@ -449,21 +457,22 @@ def create_legacy_issue_update_task(before_issue, current_issue) completed_by: user ) # format the task instructions and close out - task.format_instructions( - "Edited Issue", - [ + set = CaseTimelineInstructionSet.new( + change_type: "Edited Issue", + issue_category: [ "Benefit Type: #{before_issue.labels[0]}\n", "Issue: #{before_issue.labels[1..-2].join("\n")}\n", - "Code: #{[before_issue.codes[-1], before_issue.labels[-1]].join(" - ")}\n", + "Code: #{[before_issue.codes[-1], before_issue.labels[-1]].join(' - ')}\n", "Note: #{before_issue.note}\n", "Disposition: #{before_issue.readable_disposition}\n" ].compact.join("\r\n"), - "", - before_issue.mst_status, - before_issue.pact_status, - current_issue[:mst_status], - current_issue[:pact_status] + benefit_type: "", + original_mst: before_issue.mst_status, + original_pact: before_issue.pact_status, + edit_mst: current_issue[:mst_status], + edit_pact: current_issue[:pact_status] ) + task.format_instructions(set) task.completed! # create SpecialIssueChange record to log the changes @@ -482,8 +491,10 @@ def create_legacy_issue_update_task(before_issue, current_issue) change_category: "Edited Issue" ) end + # rubocop:enable Metrics/AbcSize, Metrics/MethodLength # updated flash message to show mst/pact message if mst/pact changes (not to legacy) + # rubocop:disable Layout/LineLength def set_flash_success_message return set_flash_mst_edit_message if mst_pact_changes? && (FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || @@ -491,6 +502,7 @@ def set_flash_success_message set_flash_edit_message end + # rubocop:enable Layout/LineLength # create success message with added and removed issues def set_flash_mst_edit_message @@ -521,21 +533,6 @@ def docket_number?(search) !search.nil? && search.match?(/\d{6}-{1}\d+$/) end - def update_or_delete_power_of_attorney! - appeal.power_of_attorney&.try(:clear_bgs_power_of_attorney!) 
# clear memoization on legacy appeals - poa = appeal.bgs_power_of_attorney - - if poa.blank? - ["Successfully refreshed. No power of attorney information was found at this time.", "success", "blank"] - elsif poa.bgs_record == :not_found - poa.destroy! - ["Successfully refreshed. No power of attorney information was found at this time.", "success", "deleted"] - else - poa.save_with_updated_bgs_record! - ["POA Updated Successfully", "success", "updated"] - end - end - def send_initial_notification_letter # depending on the docket type, create cooresponding task as parent task case appeal.docket_type @@ -546,13 +543,15 @@ def send_initial_notification_letter when "direct_review" parent_task = @appeal.tasks.find_by(type: "DistributionTask") end - @send_initial_notification_letter ||= @appeal.tasks.open.find_by(type: :SendInitialNotificationLetterTask) || - SendInitialNotificationLetterTask.create!( - appeal: @appeal, - parent: parent_task, - assigned_to: Organization.find_by_url("clerk-of-the-board"), - assigned_by: RequestStore[:current_user] - ) unless parent_task.nil? + unless parent_task.nil? + @send_initial_notification_letter ||= @appeal.tasks.open.find_by(type: :SendInitialNotificationLetterTask) || + SendInitialNotificationLetterTask.create!( + appeal: @appeal, + parent: parent_task, + assigned_to: Organization.find_by_url("clerk-of-the-board"), + assigned_by: RequestStore[:current_user] + ) + end end def power_of_attorney_data @@ -566,34 +565,12 @@ def power_of_attorney_data } end - def clear_poa_not_found_cache - Rails.cache.delete("bgs-participant-poa-not-found-#{appeal&.veteran&.file_number}") - Rails.cache.delete("bgs-participant-poa-not-found-#{appeal&.claimant_participant_id}") - end - - def cooldown_period_remaining - next_update_allowed_at = appeal.poa_last_synced_at + 10.minutes if appeal.poa_last_synced_at.present? - if next_update_allowed_at && next_update_allowed_at > Time.zone.now - return ((next_update_allowed_at - Time.zone.now) / 60).ceil - end - - 0 - end - - def render_error(error) - Rails.logger.error("#{error.message}\n#{error.backtrace.join("\n")}") - Raven.capture_exception(error, extra: { appeal_type: appeal.type, appeal_id: appeal.id }) - render json: { - alert_type: "error", - message: "Something went wrong" - }, status: :unprocessable_entity - end - # Purpose: Fetches all notifications for an appeal # # Params: appeals_id (vacols_id OR uuid) # # Response: Returns an array of all retrieved notifications + # rubocop:disable Layout/LineLength def find_notifications_by_appeals_id(appeals_id) # Retrieve notifications based on appeals_id, excluding statuses of 'No participant_id' & 'No claimant' @all_notifications = Notification.where(appeals_id: appeals_id) @@ -608,6 +585,7 @@ def find_notifications_by_appeals_id(appeals_id) WorkQueue::NotificationSerializer.new(@allowed_notifications).serializable_hash[:data] end end + # rubocop:enable Layout/LineLength # Notification report pdf template only accepts the Appeal or Legacy Appeal object # Finds appeal object using appeals id passed through url params @@ -622,3 +600,4 @@ def get_appeal_object(appeals_id) end end end +# rubocop:enable Metrics/ClassLength diff --git a/app/controllers/claim_review_controller.rb b/app/controllers/claim_review_controller.rb index c2755e1ad6a..d4dc84d5a56 100644 --- a/app/controllers/claim_review_controller.rb +++ b/app/controllers/claim_review_controller.rb @@ -94,7 +94,7 @@ def render_success if claim_review.processed_in_caseflow? 
set_flash_success_message - render json: { redirect_to: claim_review.business_line.tasks_url, + render json: { redirect_to: claim_review.redirect_url, beforeIssues: request_issues_update.before_issues.map(&:serialize), afterIssues: request_issues_update.after_issues.map(&:serialize), withdrawnIssues: request_issues_update.withdrawn_issues.map(&:serialize) } @@ -136,24 +136,56 @@ def review_edited_message "You have successfully " + [added_issues, removed_issues, withdrawn_issues].compact.to_sentence + "." end + def vha_edited_decision_date_message + COPY::VHA_ADD_DECISION_DATE_TO_ISSUE_SUCCESS_MESSAGE + end + + def vha_established_message + "You have successfully established #{claimant_name}'s #{claim_review.class.review_title}" + end + + def claimant_name + if claim_review.veteran_is_not_claimant + claim_review.claimant.try(:name) + else + claim_review.veteran_full_name + end + end + + def vha_flash_message + issues_without_decision_date = (request_issues_update.after_issues - + request_issues_update.edited_issues - + request_issues_update.removed_or_withdrawn_issues) + .select { |issue| issue.decision_date.blank? && !issue.withdrawn? } + + if issues_without_decision_date.empty? + vha_established_message + elsif request_issues_update.edited_issues.any? + vha_edited_decision_date_message + else + review_edited_message + end + end + def set_flash_success_message flash[:edited] = if request_issues_update.after_issues.empty? decisions_removed_message elsif (request_issues_update.after_issues - request_issues_update.withdrawn_issues).empty? review_withdrawn_message + elsif claim_review.benefit_type == "vha" + vha_flash_message else review_edited_message end end def decisions_removed_message - claimant_name = claim_review.veteran_full_name "You have successfully removed #{claim_review.class.review_title} for #{claimant_name} (ID: #{claim_review.veteran.ssn})." end def review_withdrawn_message - "You have successfully withdrawn a review." + COPY::CLAIM_REVIEW_WITHDRAWN_MESSAGE end def claim_label_edit_params diff --git a/app/controllers/concerns/update_poa_concern.rb b/app/controllers/concerns/update_poa_concern.rb new file mode 100644 index 00000000000..e5a99265962 --- /dev/null +++ b/app/controllers/concerns/update_poa_concern.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +module UpdatePOAConcern + extend ActiveSupport::Concern + # these two methods were previously in appeals controller trying to see if they can be brought here. + + def clear_poa_not_found_cache(appeal) + Rails.cache.delete("bgs-participant-poa-not-found-#{appeal&.veteran&.file_number}") + Rails.cache.delete("bgs-participant-poa-not-found-#{appeal&.claimant_participant_id}") + end + + def cooldown_period_remaining(appeal) + next_update_allowed_at = appeal.poa_last_synced_at + 10.minutes if appeal.poa_last_synced_at.present? + if next_update_allowed_at && next_update_allowed_at > Time.zone.now + return ((next_update_allowed_at - Time.zone.now) / 60).ceil + end + + 0 + end + + def update_or_delete_power_of_attorney!(appeal) + appeal.power_of_attorney&.try(:clear_bgs_power_of_attorney!) # clear memoization on legacy appeals + poa = appeal.bgs_power_of_attorney + if poa.blank? + [COPY::POA_SUCCESSFULLY_REFRESH_MESSAGE, "success", "blank"] + elsif poa.bgs_record == :not_found + poa.destroy! + [COPY::POA_SUCCESSFULLY_REFRESH_MESSAGE, "success", "deleted"] + else + poa.save_with_updated_bgs_record! 
+ [COPY::POA_UPDATED_SUCCESSFULLY, "success", "updated"] + end + rescue StandardError => error + [error, "error", "updated"] + end + + def update_poa_information(appeal) + clear_poa_not_found_cache(appeal) + cooldown_period = cooldown_period_remaining(appeal) + if cooldown_period > 0 + render json: { + alert_type: "info", + message: "Information is current at this time. Please try again in #{cooldown_period} minutes", + power_of_attorney: power_of_attorney_data + } + else + message, result, status = update_or_delete_power_of_attorney!(appeal) + render json: { + alert_type: result, + message: message, + power_of_attorney: (status == "updated") ? power_of_attorney_data : {} + } + end + end + + def render_error(error) + Rails.logger.error("#{error.message}\n#{error.backtrace.join("\n")}") + Raven.capture_exception(error, extra: { appeal_type: appeal.type, appeal_id: appeal.id }) + render json: { + alert_type: "error", + message: "Something went wrong" + }, status: :unprocessable_entity + end +end diff --git a/app/controllers/decision_reviews_controller.rb b/app/controllers/decision_reviews_controller.rb index 2cee92408a3..687c6066c52 100644 --- a/app/controllers/decision_reviews_controller.rb +++ b/app/controllers/decision_reviews_controller.rb @@ -2,16 +2,21 @@ class DecisionReviewsController < ApplicationController include GenericTaskPaginationConcern + include UpdatePOAConcern before_action :verify_access, :react_routed, :set_application before_action :verify_veteran_record_access, only: [:show] - delegate :in_progress_tasks, + delegate :incomplete_tasks, + :incomplete_tasks_type_counts, + :incomplete_tasks_issue_type_counts, + :in_progress_tasks, :in_progress_tasks_type_counts, :in_progress_tasks_issue_type_counts, :completed_tasks, :completed_tasks_type_counts, :completed_tasks_issue_type_counts, + :included_tabs, to: :business_line SORT_COLUMN_MAPPINGS = { @@ -81,15 +86,43 @@ def business_line end def task_filter_details + task_filter_hash = {} + included_tabs.each do |tab_name| + case tab_name + when :incomplete + task_filter_hash[:incomplete] = incomplete_tasks_type_counts + task_filter_hash[:incomplete_issue_types] = incomplete_tasks_issue_type_counts + when :in_progress + task_filter_hash[:in_progress] = in_progress_tasks_type_counts + task_filter_hash[:in_progress_issue_types] = in_progress_tasks_issue_type_counts + when :completed + task_filter_hash[:completed] = completed_tasks_type_counts + task_filter_hash[:completed_issue_types] = completed_tasks_issue_type_counts + else + fail NotImplementedError "Tab name type not implemented for this business line: #{business_line}" + end + end + task_filter_hash + end + + def business_line_config_options { - in_progress: in_progress_tasks_type_counts, - completed: completed_tasks_type_counts, - in_progress_issue_types: in_progress_tasks_issue_type_counts, - completed_issue_types: completed_tasks_issue_type_counts + tabs: included_tabs } end - helper_method :task_filter_details, :business_line, :task + def power_of_attorney + render json: power_of_attorney_data + end + + def update_power_of_attorney + appeal = task.appeal + update_poa_information(appeal) + rescue StandardError => error + render_error(error) + end + + helper_method :task_filter_details, :business_line, :task, :business_line_config_options private @@ -110,13 +143,14 @@ def decision_issue_params def queue_tasks tab_name = allowed_params[Constants.QUEUE_CONFIG.TAB_NAME_REQUEST_PARAM.to_sym] - return missing_tab_parameter_error unless tab_name - sort_by_column = 
SORT_COLUMN_MAPPINGS[allowed_params[Constants.QUEUE_CONFIG.SORT_COLUMN_REQUEST_PARAM.to_sym]] tasks = case tab_name + when "incomplete" then incomplete_tasks(pagination_query_params(sort_by_column)) when "in_progress" then in_progress_tasks(pagination_query_params(sort_by_column)) when "completed" then completed_tasks(pagination_query_params(sort_by_column)) + when nil + return missing_tab_parameter_error else return unrecognized_tab_name_error end @@ -174,4 +208,15 @@ def allowed_params decision_issues: [:description, :disposition, :request_issue_id] ) end + + def power_of_attorney_data + { + representative_type: task.appeal&.representative_type, + representative_name: task.appeal&.representative_name, + representative_address: task.appeal&.representative_address, + representative_email_address: task.appeal&.representative_email_address, + representative_tz: task.appeal&.representative_tz, + poa_last_synced_at: task.appeal&.poa_last_synced_at + } + end end diff --git a/app/controllers/dispatch_stats_controller.rb b/app/controllers/dispatch_stats_controller.rb deleted file mode 100644 index 15492388010..00000000000 --- a/app/controllers/dispatch_stats_controller.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require "json" - -class DispatchStatsController < ApplicationController - before_action :verify_authentication - before_action :verify_access - - def show - # deprecated 2019/08/28 - # either remove this controller entirely or render 404. - render "errors/404", layout: "application", status: :not_found - - @stats = { - hourly: 0...24, - daily: 0...30, - weekly: 0...26, - monthly: 0...24 - }[interval].map { |i| DispatchStats.offset(time: DispatchStats.now, interval: interval, offset: i) } - end - - def logo_name - "Dispatch" - end - - def interval - @interval ||= DispatchStats::INTERVALS.find { |i| i.to_s == params[:interval] } || :hourly - end - helper_method :interval - - private - - def verify_access - verify_authorized_roles("Manage Claim Establishment") - end -end diff --git a/app/controllers/help_controller.rb b/app/controllers/help_controller.rb index 17d7f610e3f..b03af1f4e3d 100644 --- a/app/controllers/help_controller.rb +++ b/app/controllers/help_controller.rb @@ -5,7 +5,8 @@ class HelpController < ApplicationController def feature_toggle_ui_hash(user = current_user) { - programOfficeTeamManagement: FeatureToggle.enabled?(:program_office_team_management, user: user) + programOfficeTeamManagement: FeatureToggle.enabled?(:program_office_team_management, user: user), + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) } end diff --git a/app/controllers/idt/api/v2/distributions_controller.rb b/app/controllers/idt/api/v2/distributions_controller.rb index 2b35a8435ff..443bf8e6040 100644 --- a/app/controllers/idt/api/v2/distributions_controller.rb +++ b/app/controllers/idt/api/v2/distributions_controller.rb @@ -38,16 +38,19 @@ def pending_establishment(distribution_id) def format_response(response) response_body = response.raw_body - begin - parsed_response = JSON.parse(response_body) + parsed_response = if [ActiveSupport::HashWithIndifferentAccess, Hash].include?(response_body.class) + response_body + else + JSON.parse(response_body) + end # Convert keys from camelCase to snake_case parsed_response.deep_transform_keys do |key| key.to_s.underscore.gsub(/e(\d)/, 'e_\1') end - rescue JSON::ParseError => error - log_error(error + " Distribution ID: #{params[:distribution_id]}") + rescue StandardError => error + log_error(error) 
response_body end diff --git a/app/controllers/intakes_controller.rb b/app/controllers/intakes_controller.rb index dc0ae020f41..37e21cb3ba0 100644 --- a/app/controllers/intakes_controller.rb +++ b/app/controllers/intakes_controller.rb @@ -56,9 +56,10 @@ def review def complete intake.complete!(params) + if !detail.is_a?(Appeal) && detail.try(:processed_in_caseflow?) - flash[:success] = success_message - render json: { serverIntake: { redirect_to: detail.business_line.tasks_url } } + flash[:success] = (detail.benefit_type == "vha") ? vha_success_message : success_message + render json: { serverIntake: { redirect_to: detail.try(:redirect_url) || business_line.tasks_url } } else render json: intake.ui_hash end @@ -157,7 +158,8 @@ def feature_toggle_ui_hash justificationReason: FeatureToggle.enabled?(:justification_reason, user: current_user), updatedAppealForm: FeatureToggle.enabled?(:updated_appeal_form, user: current_user), hlrScUnrecognizedClaimants: FeatureToggle.enabled?(:hlr_sc_unrecognized_claimants, user: current_user), - vhaClaimReviewEstablishment: FeatureToggle.enabled?(:vha_claim_review_establishment, user: current_user) + vhaClaimReviewEstablishment: FeatureToggle.enabled?(:vha_claim_review_establishment, user: current_user), + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) } end @@ -193,9 +195,23 @@ def detail @detail ||= intake&.detail end + def claimant_name + if detail.veteran_is_not_claimant + detail.claimant.try(:name) + else + detail.veteran_full_name + end + end + def success_message - claimant_name = detail.veteran_full_name - claimant_name = detail.claimant.try(:name) if detail.veteran_is_not_claimant "#{claimant_name} (Veteran SSN: #{detail.veteran.ssn}) #{detail.class.review_title} has been processed." end + + def vha_success_message + if detail.request_issues_without_decision_dates? 
+ "You have successfully saved #{claimant_name}'s #{detail.class.review_title}" + else + "You have successfully established #{claimant_name}'s #{detail.class.review_title}" + end + end end diff --git a/app/controllers/issues_controller.rb b/app/controllers/issues_controller.rb index 71b2b4b48b7..fa6d8052b65 100644 --- a/app/controllers/issues_controller.rb +++ b/app/controllers/issues_controller.rb @@ -17,6 +17,7 @@ class IssuesController < ApplicationController handle_non_critical_error("issues", e) end + # rubocop:disable Layout/LineLength def create return record_not_found unless appeal @@ -25,17 +26,19 @@ def create # create MST/PACT task if issue was created if convert_to_bool(create_params[:mst_status]) || convert_to_bool(create_params[:pact_status]) - issue_in_caseflow = appeal.issues.find { |i| i.vacols_sequence_id == issue.issseq.to_i } + issue_in_caseflow = appeal.issues.find { |iss| iss.vacols_sequence_id == issue.issseq.to_i } create_legacy_issue_update_task(issue_in_caseflow) if FeatureToggle.enabled?(:legacy_mst_pact_identification, user: RequestStore[:current_user]) end render json: { issues: json_issues }, status: :created end + # rubocop:enable Layout/LineLength + # rubocop:disable Layout/LineLength, Metrics/AbcSize def update return record_not_found unless appeal - issue = appeal.issues.find { |i| i.vacols_sequence_id == params[:vacols_sequence_id].to_i } + issue = appeal.issues.find { |iss| iss.vacols_sequence_id == params[:vacols_sequence_id].to_i } if issue.mst_status != convert_to_bool(params[:issues][:mst_status]) || issue.pact_status != convert_to_bool(params[:issues][:pact_status]) create_legacy_issue_update_task(issue) if FeatureToggle.enabled?(:legacy_mst_pact_identification, user: RequestStore[:current_user]) @@ -52,6 +55,7 @@ def update render json: { issues: json_issues }, status: :ok end + # rubocop:enable Layout/LineLength, Metrics/AbcSize def destroy return record_not_found unless appeal @@ -65,6 +69,7 @@ def destroy private + # rubocop:disable Layout/LineLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity def create_legacy_issue_update_task(issue) user = current_user @@ -101,21 +106,22 @@ def create_legacy_issue_update_task(issue) level_1_description = level_1_code.nil? ? "N/A" : param_issue["levels"][issue_code]["levels"][level_1_code]["description"] # format the task instructions and close out - task.format_instructions( - change_category, - [ + set = CaseTimelineInstructionSet.new( + change_type: change_category, + issue_category: [ "Benefit Type: #{param_issue['description']}\n", "Issue: #{iss}\n", - "Code: #{[level_1_code, level_1_description].join(" - ")}\n", + "Code: #{[level_1_code, level_1_description].join(' - ')}\n", "Note: #{note}\n", "Disposition: #{disposition}\n" ].compact.join("\r\n"), - "", - issue.mst_status, - issue.pact_status, - updated_mst_status, - updated_pact_status + benefit_type: "", + original_mst: issue.mst_status, + original_pact: issue.pact_status, + edit_mst: updated_mst_status, + edit_pact: updated_pact_status ) + task.format_instructions(set) task.completed! 
# create SpecialIssueChange record to log the changes SpecialIssueChange.create!( @@ -133,6 +139,7 @@ def create_legacy_issue_update_task(issue) change_category: change_category ) end + # rubocop:enable Layout/LineLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity def convert_to_bool(status) status == "Y" diff --git a/app/controllers/metrics/dashboard_controller.rb b/app/controllers/metrics/dashboard_controller.rb new file mode 100644 index 00000000000..232aeaa9d1b --- /dev/null +++ b/app/controllers/metrics/dashboard_controller.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +class Metrics::DashboardController < ApplicationController + before_action :require_demo + + def show + no_cache + + @metrics = Metric.includes(:user).where("created_at > ?", 1.hour.ago).order(created_at: :desc) + + begin + render :show, layout: "plain_application" + rescue StandardError => error + Rails.logger.error(error.full_message) + raise error.full_message + end + end + + private + + def require_demo + redirect_to "/unauthorized" unless Rails.deploy_env?(:demo) + end +end diff --git a/app/controllers/metrics/v2/logs_controller.rb b/app/controllers/metrics/v2/logs_controller.rb new file mode 100644 index 00000000000..b947ab44418 --- /dev/null +++ b/app/controllers/metrics/v2/logs_controller.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +class Metrics::V2::LogsController < ApplicationController + skip_before_action :verify_authentication + + def create + return metrics_not_saved unless FeatureToggle.enabled?(:metrics_monitoring, user: current_user) + + metric = Metric.create_metric_from_rest(self, allowed_params, current_user) + failed_metric_info = metric&.errors.inspect || allowed_params[:message] + Rails.logger.info("Failed to create metric #{failed_metric_info}") unless metric&.valid? 
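    # For illustration only — a request of roughly this shape would pass the permit list in
    # allowed_params below (the field names come from that list; the route and values are assumed):
    #
    #   POST /metrics/v2/logs
    #   metric: { uuid: SecureRandom.uuid, name: "javascript_page_load", group: "performance",
    #             message: "page load took 3200ms", type: "performance", product: "queue" }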
+ + head :ok + end + + private + + def metrics_not_saved + render json: { error_code: "Metrics not saved for user" }, status: :unprocessable_entity + end + + def allowed_params + params.require(:metric).permit(:uuid, + :name, + :group, + :message, + :type, + :product, + :app_name, + :metric_attributes, + :additional_info, + :sent_to, + :sent_to_info, + :relevant_tables_info, + :start, + :end, + :duration) + end +end diff --git a/app/controllers/stats_controller.rb b/app/controllers/stats_controller.rb deleted file mode 100644 index 9a4ccefacf6..00000000000 --- a/app/controllers/stats_controller.rb +++ /dev/null @@ -1,10 +0,0 @@ -# frozen_string_literal: true - -class StatsController < ApplicationController - before_action :verify_authentication - before_action :verify_access - - def verify_access - verify_system_admin - end -end diff --git a/app/controllers/tasks_controller.rb b/app/controllers/tasks_controller.rb index 2aebe30f96f..7cadcde95aa 100644 --- a/app/controllers/tasks_controller.rb +++ b/app/controllers/tasks_controller.rb @@ -30,6 +30,7 @@ class TasksController < ApplicationController EducationDocumentSearchTask: EducationDocumentSearchTask, FoiaTask: FoiaTask, HearingAdminActionTask: HearingAdminActionTask, + HearingPostponementRequestMailTask: HearingPostponementRequestMailTask, InformalHearingPresentationTask: InformalHearingPresentationTask, JudgeAddressMotionToVacateTask: JudgeAddressMotionToVacateTask, JudgeAssignTask: JudgeAssignTask, diff --git a/app/controllers/test/users_controller.rb b/app/controllers/test/users_controller.rb index bf6f2d93bb8..f24ec9c2e7f 100644 --- a/app/controllers/test/users_controller.rb +++ b/app/controllers/test/users_controller.rb @@ -62,7 +62,8 @@ class Test::UsersController < ApplicationController stats: "/stats", jobs: "/jobs", admin: "/admin", - test_veterans: "/test/data" + test_veterans: "/test/data", + metrics_dashboard: "/metrics/dashboard" } } ].freeze @@ -177,7 +178,9 @@ def user_session helper_method :user_session def veteran_records - redirect_to "/unauthorized" if Rails.deploy_env?(:prod) || Rails.deploy_env?(:preprod) + redirect_to "/unauthorized" if Rails.deploy_env?(:prod) || \ + Rails.deploy_env?(:prodtest) || \ + Rails.deploy_env?(:preprod) build_veteran_profile_records end diff --git a/app/controllers/unrecognized_appellants_controller.rb b/app/controllers/unrecognized_appellants_controller.rb index 624922c9cec..f7df830ac69 100644 --- a/app/controllers/unrecognized_appellants_controller.rb +++ b/app/controllers/unrecognized_appellants_controller.rb @@ -42,7 +42,7 @@ def unrecognized_appellant_params def unrecognized_party_details [ :party_type, :name, :middle_name, :last_name, :suffix, :address_line_1, :address_line_2, :date_of_birth, - :address_line_3, :city, :state, :zip, :country, :phone_number, :email_address + :address_line_3, :city, :state, :zip, :country, :phone_number, :email_address, :ein, :ssn ] end end diff --git a/app/jobs/ama_notification_efolder_sync_job.rb b/app/jobs/ama_notification_efolder_sync_job.rb index 99813809bc2..a015cdbcb3d 100644 --- a/app/jobs/ama_notification_efolder_sync_job.rb +++ b/app/jobs/ama_notification_efolder_sync_job.rb @@ -15,9 +15,15 @@ class AmaNotificationEfolderSyncJob < CaseflowJob def perform RequestStore[:current_user] = User.system_user - all_active_ama_appeals = appeals_recently_outcoded + appeals_never_synced + ready_for_resync - - sync_notification_reports(all_active_ama_appeals.first(BATCH_LIMIT.to_i)) + all_active_ama_appeals = if 
FeatureToggle.enabled?(:phase_1_notification_sync_job_rollout) + appeals_never_synced + elsif FeatureToggle.enabled?(:phase_2_notification_sync_job_rollout) + appeals_never_synced + ready_for_resync + else + appeals_recently_outcoded + appeals_never_synced + ready_for_resync + end + + sync_notification_reports(all_active_ama_appeals.uniq(&:id).first(BATCH_LIMIT.to_i)) end private @@ -98,18 +104,21 @@ def ready_for_resync # Return: Array of active appeals def get_appeals_from_prev_synced_ids(appeal_ids) appeal_ids.in_groups_of(1000, false).flat_map do |ids| - Appeal.active.find_by_sql( + Appeal.find_by_sql( <<-SQL - SELECT appeals.* - FROM appeals + SELECT appeals.* FROM appeals + JOIN tasks t ON appeals.id = t.appeal_id + AND t.appeal_type = 'Appeal' JOIN (#{appeals_on_latest_notifications(ids)}) AS notifs ON notifs.appeals_id = appeals."uuid"::text AND notifs.appeals_type = 'Appeal' JOIN (#{appeals_on_latest_doc_uploads(ids)}) AS vbms_uploads ON vbms_uploads.appeal_id = appeals.id AND vbms_uploads.appeal_type = 'Appeal' - WHERE + WHERE ( notifs.notified_at > vbms_uploads.attempted_at OR notifs.created_at > vbms_uploads.attempted_at + ) + AND t.TYPE = 'RootTask' AND t.status NOT IN ('completed', 'cancelled') GROUP BY appeals.id SQL ) @@ -120,8 +129,16 @@ def appeals_on_latest_notifications(appeal_ids) <<-SQL SELECT n1.* FROM appeals a JOIN notifications n1 on n1.appeals_id = a."uuid"::text AND n1.appeals_type = 'Appeal' - LEFT OUTER JOIN notifications n2 ON (n2.appeals_id = a."uuid"::text AND n1.appeals_type = 'Appeal' AND - (n1.notified_at < n2.notified_at OR (n1.notified_at = n2.notified_at AND n1.id < n2.id))) + AND (n1.email_notification_status IS NULL OR + n1.email_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n1.sms_notification_status IS NULL OR + n1.sms_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + LEFT OUTER JOIN notifications n2 ON (n2.appeals_id = a."uuid"::text AND n1.appeals_type = 'Appeal' + AND (n2.email_notification_status IS NULL OR + n2.email_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n2.sms_notification_status IS NULL OR + n2.sms_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n1.notified_at < n2.notified_at OR (n1.notified_at = n2.notified_at AND n1.id < n2.id))) WHERE n2.id IS NULL AND n1.id IS NOT NULL AND (n1.email_notification_status <> 'Failure Due to Deceased' diff --git a/app/jobs/batch_processes/batch_process_rescue_job.rb b/app/jobs/batch_processes/batch_process_rescue_job.rb new file mode 100644 index 00000000000..890c6820c28 --- /dev/null +++ b/app/jobs/batch_processes/batch_process_rescue_job.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +# This job will search for and reprocess unfinished Batch Processes nightly. +# Search Criteria is for Batch Processes that are in an unfinished state ('PRE_PROCESSING', 'PROCESSING') & +# have a created_at date/time that is greater than the ERROR_DELAY defined within batch_process.rb +class BatchProcessRescueJob < CaseflowJob + queue_with_priority :low_priority + + # :reek:FeatureEnvy + def perform + batches = BatchProcess.needs_reprocessing + if batches.any? + batches.each do |batch| + begin + batch.process_batch! + rescue StandardError => error + log_error(error, extra: { active_job_id: job_id.to_s, job_time: Time.zone.now.to_s }) + slack_msg = "Error running #{self.class.name}. 
Error: #{error.message}. Active Job ID: #{job_id}." + slack_msg += " See Sentry event #{Raven.last_event_id}." if Raven.last_event_id.present? + slack_service.send_notification("[ERROR] #{slack_msg}", self.class.to_s) + next + end + end + else + Rails.logger.info("No Unfinished Batches Could Be Identified. Time: #{Time.zone.now}.") + end + end +end diff --git a/app/jobs/batch_processes/priority_ep_sync_batch_process_job.rb b/app/jobs/batch_processes/priority_ep_sync_batch_process_job.rb new file mode 100644 index 00000000000..539ed5a050f --- /dev/null +++ b/app/jobs/batch_processes/priority_ep_sync_batch_process_job.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +class PriorityEpSyncBatchProcessJob < CaseflowJob + queue_with_priority :low_priority + + # Using macro-style definition. The locking scope will be TheClass#method and only one method can run at any + # given time. + include RedisMutex::Macro + + # Default options for RedisMutex#with_lock + # :block => 1 # Specify in seconds how long you want to wait for the lock to be released. + # # Specify 0 if you need non-blocking sematics and return false immediately. (default: 1) + # :sleep => 0.1 # Specify in seconds how long the polling interval should be when :block is given. + # # It is NOT recommended to go below 0.01. (default: 0.1) + # :expire => 10 # Specify in seconds when the lock should be considered stale when something went wrong + # # with the one who held the lock and failed to unlock. (default: 10) + # + # RedisMutex.with_lock("PriorityEpSyncBatchProcessJob", block: 60, expire: 100) + # Key => "PriorityEpSyncBatchProcessJob" + + JOB_DURATION ||= ENV["BATCH_PROCESS_JOB_DURATION"].to_i.minutes + SLEEP_DURATION ||= ENV["BATCH_PROCESS_SLEEP_DURATION"].to_i + + # Attempts to create & process batches for 50 minutes + # There will be a 5 second rest between each iteration + # Job will end if there are no records are left to batch + + # rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity + def perform + setup_job + loop do + break if job_running_past_expected_end_time? || should_stop_job + + begin + batch = nil + RedisMutex.with_lock("PriorityEpSyncBatchProcessJob", block: 60, expire: 100) do + batch = ActiveRecord::Base.transaction do + records_to_batch = PriorityEpSyncBatchProcess.find_records_to_batch + next if records_to_batch.empty? + + PriorityEpSyncBatchProcess.create_batch!(records_to_batch) + end + end + + batch ? batch.process_batch! : stop_job(log_no_records_found: true) + + sleep(SLEEP_DURATION) + rescue StandardError => error + log_error(error, extra: { job_id: job_id.to_s, job_time: Time.zone.now.to_s }) + slack_msg = "Error running #{self.class.name}. Error: #{error.message}. Active Job ID: #{job_id}." + slack_msg += " See Sentry event #{Raven.last_event_id}." if Raven.last_event_id.present? + slack_service.send_notification("[ERROR] #{slack_msg}", self.class.to_s) + stop_job + end + end + end + # rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity + + private + + attr_accessor :job_expected_end_time, :should_stop_job + + def setup_job + RequestStore.store[:current_user] = User.system_user + @should_stop_job = false + @job_expected_end_time = Time.zone.now + JOB_DURATION + end + + def job_running_past_expected_end_time? 
+ Time.zone.now > job_expected_end_time + end + + # :reek:BooleanParameter + # :reek:ControlParameter + def stop_job(log_no_records_found: false) + self.should_stop_job = true + if log_no_records_found + Rails.logger.info("#{self.class} Cannot Find Any Records to Batch."\ + " Job will be enqueued again at the top of the hour. Active Job ID: #{job_id}. Time: #{Time.zone.now}") + end + end +end diff --git a/app/jobs/bgs_share_error_fix_job.rb b/app/jobs/bgs_share_error_fix_job.rb new file mode 100644 index 00000000000..9fccbd6f9ec --- /dev/null +++ b/app/jobs/bgs_share_error_fix_job.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +class BgsShareErrorFixJob < CaseflowJob + ERROR_TEXT = "ShareError" + STUCK_JOB_REPORT_SERVICE = StuckJobReportService.new + + def perform + clear_hlr_errors if hlrs_with_errors.present? + clear_rius_errors if rius_with_errors.present? + clear_bge_errors if bges_with_errors.present? + STUCK_JOB_REPORT_SERVICE.write_log_report(ERROR_TEXT) + end + + def clear_rius_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(rius_with_errors.count, ERROR_TEXT) + rius_with_errors.each do |riu| + epe = EndProductEstablishment.find_by( + id: riu.review_id + ) + next if epe.established_at.blank? + + resolve_error_on_records(riu) + STUCK_JOB_REPORT_SERVICE.append_single_record(riu.class.name, riu.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(rius_with_errors.count, ERROR_TEXT) + end + + def clear_hlr_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + + hlrs_with_errors.each do |hlr| + epe = EndProductEstablishment.find_by( + veteran_file_number: hlr.veteran_file_number + ) + next if epe.established_at.blank? + + resolve_error_on_records(hlr) + STUCK_JOB_REPORT_SERVICE.append_single_record(hlr.class.name, hlr.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + end + + def clear_bge_errors + STUCK_JOB_REPORT_SERVICE.append_record_count(bges_with_errors.count, ERROR_TEXT) + + bges_with_errors.each do |bge| + next if bge.end_product_establishment.established_at.blank? + + resolve_error_on_records(bge) + STUCK_JOB_REPORT_SERVICE.append_single_record(bge.class.name, bge.id) + end + STUCK_JOB_REPORT_SERVICE.append_record_count(bges_with_errors.count, ERROR_TEXT) + end + + def hlrs_with_errors + HigherLevelReview.where("establishment_error ILIKE?", "%#{ERROR_TEXT}%") + end + + def rius_with_errors + RequestIssuesUpdate.where("error ILIKE?", "%#{ERROR_TEXT}%") + end + + def bges_with_errors + BoardGrantEffectuation.where("decision_sync_error ILIKE?", "%#{ERROR_TEXT}%") + end + + private + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type) + ActiveRecord::Base.transaction do + object_type.clear_error! + rescue StandardError => error + log_error(error) + STUCK_JOB_REPORT_SERVICE.append_errors(object_type.class.name, object_type.id, error) + end + end +end diff --git a/app/jobs/calculate_dispatch_stats_job.rb b/app/jobs/calculate_dispatch_stats_job.rb deleted file mode 100644 index d22beb1fa3a..00000000000 --- a/app/jobs/calculate_dispatch_stats_job.rb +++ /dev/null @@ -1,12 +0,0 @@ -# frozen_string_literal: true - -class CalculateDispatchStatsJob < ApplicationJob - queue_with_priority :low_priority - application_attr :dispatch - - # :nocov: - def perform - DispatchStats.throttled_calculate_all! 
- end - # :nocov: -end diff --git a/app/jobs/cannot_delete_contention_remediation_job.rb b/app/jobs/cannot_delete_contention_remediation_job.rb index c1547b1d08a..ef6b83cd07c 100644 --- a/app/jobs/cannot_delete_contention_remediation_job.rb +++ b/app/jobs/cannot_delete_contention_remediation_job.rb @@ -6,9 +6,13 @@ class CannotDeleteContentionRemediationJob < CaseflowJob queue_with_priority :low_priority + # Sub folder name + S3_FOLDER_NAME = "data-remediation-output" + def initialize @logs = ["\nVBMS::CannotDeleteContention Remediation Log"] @remediated_request_issues_update_ids = [] + @folder_name = (Rails.deploy_env == :prod) ? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" super end @@ -166,28 +170,10 @@ def sync_epe!(request_issues_update, request_issue, index) " Resetting EPE synced_status to null. Syncing Epe with EP.") end - # Save Logs to S3 Bucket def store_logs_in_s3_bucket - # Set Client Resources for AWS - Aws.config.update(region: "us-gov-west-1") - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("data-remediation-output") - # Folder and File name - file_name = "cannot-delete-contention-remediation-logs/cdc-remediation-log-#{Time.zone.now}" - - # Store contents of logs array in a temporary file content = @logs.join("\n") - temporary_file = Tempfile.new("cdc-log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - # Store File in S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") - - # Delete Temporary File - temporary_file.close! + file_name = "cannot-delete-contention-remediation-logs/cdc-remediation-log-#{Time.zone.now}" + S3Service.store_file("#{@folder_name}/#{file_name}", content) end end diff --git a/app/jobs/claim_date_dt_fix_job.rb b/app/jobs/claim_date_dt_fix_job.rb new file mode 100644 index 00000000000..fbbffe6cc31 --- /dev/null +++ b/app/jobs/claim_date_dt_fix_job.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +class ClaimDateDtFixJob < CaseflowJob + ERROR_TEXT = "ClaimDateDt" + + attr_reader :stuck_job_report_service + + def initialize + @stuck_job_report_service = StuckJobReportService.new + end + + def perform + process_decision_documents + end + + def process_decision_documents + return if decision_docs_with_errors.blank? + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + decision_docs_with_errors.each do |single_decision_document| + next unless valid_decision_document?(single_decision_document) + + process_decision_document(single_decision_document) + end + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def valid_decision_document?(decision_document) + decision_document.processed_at.present? && + decision_document.uploaded_to_vbms_at.present? + end + + # :reek:FeatureEnvy + def process_decision_document(decision_document) + ActiveRecord::Base.transaction do + decision_document.clear_error! 
+ rescue StandardError => error + log_error(error) + stuck_job_report_service.append_errors(decision_document.class.name, decision_document.id, error) + end + end + + def decision_docs_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/claim_not_established_fix_job.rb b/app/jobs/claim_not_established_fix_job.rb new file mode 100644 index 00000000000..dd7b7ebb76b --- /dev/null +++ b/app/jobs/claim_not_established_fix_job.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +class ClaimNotEstablishedFixJob < CaseflowJob + ERROR_TEXT = "Claim not established." + EPECODES = %w[030 040 930 682].freeze + + attr_reader :stuck_job_report_service + + def initialize + @stuck_job_report_service = StuckJobReportService.new + end + + def perform + return if decision_docs_with_errors.blank? + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + + decision_docs_with_errors.each do |single_decision_document| + file_number = single_decision_document.veteran.file_number + epe_array = EndProductEstablishment.where(veteran_file_number: file_number) + validated_epes = epe_array.map { |epe| validate_epe(epe) } + + stuck_job_report_service.append_single_record(single_decision_document.class.name, single_decision_document.id) + + resolve_error_on_records(single_decision_document, validated_epes) + end + + stuck_job_report_service.append_record_count(decision_docs_with_errors.count, ERROR_TEXT) + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def decision_docs_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERROR_TEXT}%") + end + + def validate_epe(epe) + epe_code = epe&.code&.slice(0, 3) + EPECODES.include?(epe_code) && epe&.established_at.present? + end + + private + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type, epes_array) + ActiveRecord::Base.transaction do + if !epes_array.include?(false) + object_type.clear_error! + end + rescue StandardError => error + log_error(error) + stuck_job_report_service.append_errors(object_type.class.name, object_type.id, error) + end + end +end diff --git a/app/jobs/contention_not_found_remediation_job.rb b/app/jobs/contention_not_found_remediation_job.rb index 9728f871f17..62a90376834 100644 --- a/app/jobs/contention_not_found_remediation_job.rb +++ b/app/jobs/contention_not_found_remediation_job.rb @@ -6,9 +6,12 @@ class ContentionNotFoundRemediationJob < CaseflowJob queue_with_priority :low_priority + S3_FOLDER_NAME = "data-remediation-output" + def initialize @logs = ["\nVBMS::ContentionNotFound Remediation Log"] @remediated_request_issues_update_ids = [] + @folder_name = (Rails.deploy_env == :prod) ? 
S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" super end @@ -141,26 +144,8 @@ def sync_epe!(request_issues_update, request_issue, index) # Save Logs to S3 Bucket def store_logs_in_s3_bucket - # Set Client Resources for AWS - Aws.config.update(region: "us-gov-west-1") - s3client = Aws::S3::Client.new - s3resource = Aws::S3::Resource.new(client: s3client) - s3bucket = s3resource.bucket("data-remediation-output") - - # Folder and File name - file_name = "contention-not-found-remediation-logs/cnf-remediation-log-#{Time.zone.now}" - - # Store contents of logs array in a temporary file content = @logs.join("\n") - temporary_file = Tempfile.new("cnf-log.txt") - filepath = temporary_file.path - temporary_file.write(content) - temporary_file.flush - - # Store File in S3 bucket - s3bucket.object(file_name).upload_file(filepath, acl: "private", server_side_encryption: "AES256") - - # Delete Temporary File - temporary_file.close! + file_name = "contention-not-found-remediation-logs/cnf-remediation-log-#{Time.zone.now}" + S3Service.store_file("#{@folder_name}/#{file_name}", content) end end diff --git a/app/jobs/decision_issue_sync_job.rb b/app/jobs/decision_issue_sync_job.rb index 26f08367a25..6501df05b89 100644 --- a/app/jobs/decision_issue_sync_job.rb +++ b/app/jobs/decision_issue_sync_job.rb @@ -11,6 +11,11 @@ def perform(request_issue_or_effectuation) begin request_issue_or_effectuation.sync_decision_issues! + rescue Caseflow::Error::SyncLockFailed => error + request_issue_or_effectuation.update_error!(error.inspect) + request_issue_or_effectuation.update!(decision_sync_attempted_at: Time.zone.now - 11.hours - 55.minutes) + capture_exception(error: error) + Rails.logger.error error.inspect rescue Errno::ETIMEDOUT => error # no Raven report. We'll try again later. Rails.logger.error error diff --git a/app/jobs/decision_review_process_job.rb b/app/jobs/decision_review_process_job.rb index 0e9ad7bfd07..fc9bae7c789 100644 --- a/app/jobs/decision_review_process_job.rb +++ b/app/jobs/decision_review_process_job.rb @@ -7,9 +7,6 @@ class DecisionReviewProcessJob < CaseflowJob application_attr :intake def perform(thing_to_establish) - # Temporarily stop establishing claims due to VBMS bug - return if FeatureToggle.enabled?(:disable_claim_establishment, user: RequestStore.store[:current_user]) - @decision_review = thing_to_establish # If establishment is for a RequestIssuesUpdate, use the user on the update diff --git a/app/jobs/dta_sc_creation_failed_fix_job.rb b/app/jobs/dta_sc_creation_failed_fix_job.rb new file mode 100644 index 00000000000..8a6a077f6c7 --- /dev/null +++ b/app/jobs/dta_sc_creation_failed_fix_job.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +class DtaScCreationFailedFixJob < CaseflowJob + ERROR_TEXT = "DTA SC Creation Failed" + + # :reek:FeatureEnvy + def perform + stuck_job_report_service = StuckJobReportService.new + return if hlrs_with_errors.blank? + + stuck_job_report_service.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + + hlrs_with_errors.each do |hlr| + next unless SupplementalClaim.find_by( + decision_review_remanded_id: hlr.id, + decision_review_remanded_type: "HigherLevelReview" + ) + + stuck_job_report_service.append_single_record(hlr.class.name, hlr.id) + + ActiveRecord::Base.transaction do + hlr.clear_error! 
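        # clear_error! is assumed here to simply null out the persisted error column,
        # i.e. roughly equivalent to: hlr.update!(establishment_error: nil)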
+ rescue StandardError => error + log_error(error) + stuck_job_report_service.append_error(hlr.class.name, hlr.id, error) + end + end + + stuck_job_report_service.append_record_count(hlrs_with_errors.count, ERROR_TEXT) + stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def hlrs_with_errors + HigherLevelReview.where("establishment_error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/duplicate_ep_remediation_job.rb b/app/jobs/duplicate_ep_remediation_job.rb index f1289becbb8..a1ac9570eda 100644 --- a/app/jobs/duplicate_ep_remediation_job.rb +++ b/app/jobs/duplicate_ep_remediation_job.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -class DuplicateEpRemediationJob < ApplicationJob +class DuplicateEpRemediationJob < CaseflowJob queue_with_priority :low_priority application_attr :intake def perform diff --git a/app/jobs/legacy_notification_efolder_sync_job.rb b/app/jobs/legacy_notification_efolder_sync_job.rb index d37847a8dbc..1aa41950469 100644 --- a/app/jobs/legacy_notification_efolder_sync_job.rb +++ b/app/jobs/legacy_notification_efolder_sync_job.rb @@ -15,9 +15,15 @@ class LegacyNotificationEfolderSyncJob < CaseflowJob def perform RequestStore[:current_user] = User.system_user - all_active_legacy_appeals = appeals_recently_outcoded + appeals_never_synced + ready_for_resync - - sync_notification_reports(all_active_legacy_appeals.first(BATCH_LIMIT.to_i)) + all_active_legacy_appeals = if FeatureToggle.enabled?(:phase_1_notification_sync_job_rollout) + appeals_never_synced + elsif FeatureToggle.enabled?(:phase_2_notification_sync_job_rollout) + appeals_never_synced + ready_for_resync + else + appeals_recently_outcoded + appeals_never_synced + ready_for_resync + end + + sync_notification_reports(all_active_legacy_appeals.uniq(&:id).first(BATCH_LIMIT.to_i)) end private @@ -71,7 +77,7 @@ def previous_case_notifications_document_join_clause end def open_root_task_join_clause - "JOIN tasks t ON t.appeal_type = 'LegacyAppeal' AND t.id = legacy_appeals.id \ + "JOIN tasks t ON t.appeal_type = 'LegacyAppeal' AND t.appeal_id = legacy_appeals.id \ AND t.type = 'RootTask' AND t.status NOT IN ('completed', 'cancelled')" end @@ -99,22 +105,24 @@ def ready_for_resync # Return: Array of active appeals def get_appeals_from_prev_synced_ids(appeal_ids) appeal_ids.in_groups_of(1000, false).flat_map do |ids| - LegacyAppeal.where(id: RootTask.open.where(appeal_type: "LegacyAppeal").pluck(:appeal_id)) - .find_by_sql( - <<-SQL - SELECT la.* - FROM legacy_appeals la + LegacyAppeal.find_by_sql( + <<-SQL + SELECT la.* FROM legacy_appeals la + JOIN tasks t ON la.id = t.appeal_id + AND t.appeal_type = 'LegacyAppeal' JOIN (#{appeals_on_latest_notifications(ids)}) AS notifs ON notifs.appeals_id = la.vacols_id AND notifs.appeals_type = 'LegacyAppeal' JOIN (#{appeals_on_latest_doc_uploads(ids)}) AS vbms_uploads ON vbms_uploads.appeal_id = la.id AND vbms_uploads.appeal_type = 'LegacyAppeal' - WHERE + WHERE ( notifs.notified_at > vbms_uploads.attempted_at OR notifs.created_at > vbms_uploads.attempted_at + ) + AND t.type = 'RootTask' AND t.status NOT IN ('completed', 'cancelled') GROUP BY la.id - SQL - ) + SQL + ) end end @@ -122,8 +130,16 @@ def appeals_on_latest_notifications(appeal_ids) <<-SQL SELECT n1.* FROM legacy_appeals a JOIN notifications n1 on n1.appeals_id = a.vacols_id AND n1.appeals_type = 'LegacyAppeal' - LEFT OUTER JOIN notifications n2 ON (n2.appeals_id = a.vacols_id AND n1.appeals_type = 'LegacyAppeal' AND - (n1.notified_at < n2.notified_at OR (n1.notified_at = n2.notified_at AND 
n1.id < n2.id))) + AND (n1.email_notification_status IS NULL OR + n1.email_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n1.sms_notification_status IS NULL OR + n1.sms_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + LEFT OUTER JOIN notifications n2 ON (n2.appeals_id = a.vacols_id AND n1.appeals_type = 'LegacyAppeal' + AND (n2.email_notification_status IS NULL OR + n2.email_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n2.sms_notification_status IS NULL OR + n2.sms_notification_status NOT IN ('No Participant Id Found', 'No Claimant Found', 'No External Id')) + AND (n1.notified_at < n2.notified_at OR (n1.notified_at = n2.notified_at AND n1.id < n2.id))) WHERE n2.id IS NULL AND n1.id IS NOT NULL AND (n1.email_notification_status <> 'Failure Due to Deceased' diff --git a/app/jobs/no_available_modifiers_fix_job.rb b/app/jobs/no_available_modifiers_fix_job.rb new file mode 100644 index 00000000000..f51c71e3d32 --- /dev/null +++ b/app/jobs/no_available_modifiers_fix_job.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +class NoAvailableModifiersFixJob < CaseflowJob + ERROR_TEXT = "NoAvailableModifiers" + SPACE = 10 + + def initialize + @stuck_job_report_service = StuckJobReportService.new + super + end + + def perform + @stuck_job_report_service.append_record_count(supp_claims_with_errors.count, ERROR_TEXT) + veterans_with_errors.each do |vet_fn| + active_count = current_active_eps_count(vet_fn) || 0 + available_space = SPACE - active_count + next if available_space <= 0 + + supp_claims = supp_claims_on_veteran(vet_fn) + next if supp_claims.empty? + + process_supplemental_claims(supp_claims, available_space) + end + @stuck_job_report_service.append_record_count(supp_claims_with_errors.count, ERROR_TEXT) + @stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + # :reek:FeatureEnvy + def process_supplemental_claims(supp_claims, available_space) + supp_claims.each do |sc| + next if available_space <= 0 + + @stuck_job_report_service.append_single_record(sc.class.name, sc.id) + ActiveRecord::Base.transaction do + DecisionReviewProcessJob.perform_later(sc) + rescue StandardError => error + log_error(error) + @stuck_job_report_service.append_error(sc.class.name, sc.id, error) + end + available_space -= 1 + end + end + + def supp_claims_on_veteran(file_number) + supp_claims_with_errors.select { |sc| sc.veteran_file_number == file_number } + end + + def current_active_eps_count(file_number) + synced_statuses = EndProductEstablishment.where(veteran_file_number: file_number).pluck(:synced_status).compact + synced_statuses.count { |status| status != "CAN" && status != "CLR" } + end + + def veterans_with_errors + supp_claims_with_errors.pluck(:veteran_file_number).uniq + end + + def supp_claims_with_errors + SupplementalClaim.where("establishment_error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/page_requested_by_user_fix_job.rb b/app/jobs/page_requested_by_user_fix_job.rb new file mode 100644 index 00000000000..2d129bcc866 --- /dev/null +++ b/app/jobs/page_requested_by_user_fix_job.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +class PageRequestedByUserFixJob < CaseflowJob + ERROR_TEXT = "Page requested by the user is unavailable" + + def initialize + @stuck_job_report_service = StuckJobReportService.new + super + end + + def perform + clear_bge_errors if bges_with_errors.present? 
+ end + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type) + object_type.clear_error! + rescue StandardError => error + log_error(error) + @stuck_job_report_service.append_errors(object_type.class.name, object_type.id, error) + end + + def clear_bge_errors + @stuck_job_report_service.append_record_count(bges_with_errors.count, ERROR_TEXT) + + bges_with_errors.each do |bge| + next if bge.end_product_establishment.nil? || bge.end_product_establishment.established_at.blank? + + @stuck_job_report_service.append_single_record(bge.class.name, bge.id) + resolve_error_on_records(bge) + end + @stuck_job_report_service.append_record_count(bges_with_errors.count, ERROR_TEXT) + @stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + def bges_with_errors + BoardGrantEffectuation.where("decision_sync_error ILIKE?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/process_notification_status_updates_job.rb b/app/jobs/process_notification_status_updates_job.rb new file mode 100644 index 00000000000..32bb0e443cb --- /dev/null +++ b/app/jobs/process_notification_status_updates_job.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +class ProcessNotificationStatusUpdatesJob < CaseflowJob + queue_with_priority :low_priority + + def perform + RequestStore[:current_user] = User.system_user + + redis = Redis.new(url: Rails.application.secrets.redis_url_cache) + + processed_count = 0 + + # prefer scan so we only load a single record into memory, + # dumping the whole list could cause performance issues when job runs + redis.scan_each(match: "*_update:*") do |key| + break if processed_count >= 1000 + + begin + raw_notification_type, uuid, status = key.split(":") + + notification_type = extract_notification_type(raw_notification_type) + + fail InvalidNotificationStatusFormat if [notification_type, uuid, status].any?(&:nil?) + + rows_updated = Notification.select(Arel.star).where( + Notification.arel_table["#{notification_type}_notification_external_id".to_sym].eq(uuid) + ).update_all("#{notification_type}_notification_status" => status) + + fail StandardError, "No notification matches UUID #{uuid}" if rows_updated.zero? + rescue StandardError => error + log_error(error) + ensure + # cleanup keys - do first so we don't reporcess any failed keys + redis.del key + processed_count += 1 + end + end + end + + private + + def extract_notification_type(raw_notification_type) + raw_notification_type.split("_").first + end +end diff --git a/app/jobs/sc_dta_for_appeal_fix_job.rb b/app/jobs/sc_dta_for_appeal_fix_job.rb new file mode 100644 index 00000000000..2c969b343a9 --- /dev/null +++ b/app/jobs/sc_dta_for_appeal_fix_job.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +class ScDtaForAppealFixJob < CaseflowJob + ERRORTEXT = "Can't create a SC DTA for appeal" + + def records_with_errors + DecisionDocument.where("error ILIKE ?", "%#{ERRORTEXT}%") + end + + def sc_dta_for_appeal_fix + stuck_job_report_service = StuckJobReportService.new + return if records_with_errors.blank? + + # count of records with errors before fix + stuck_job_report_service.append_record_count(records_with_errors.count, ERRORTEXT) + + records_with_errors.each do |decision_doc| + claimant = decision_doc.appeal.claimant + + next unless claimant.payee_code.nil? 
+ + if claimant.type == "VeteranClaimant" + claimant.update!(payee_code: "00") + elsif claimant.type == "DependentClaimant" + claimant.update!(payee_code: "10") + end + stuck_job_report_service.append_single_record(decision_doc.class.name, decision_doc.id) + clear_error_on_record(decision_doc) + end + + # record count with errors after fix + stuck_job_report_service.append_record_count(records_with_errors.count, ERRORTEXT) + stuck_job_report_service.write_log_report(ERRORTEXT) + end + + # :reek:FeatureEnvy + def clear_error_on_record(decision_doc) + ActiveRecord::Base.transaction do + decision_doc.clear_error! + rescue StandardError => error + log_error(error) + stuck_job_report_service.append_errors(decision_doc.class.name, decision_doc.id, error) + end + end +end diff --git a/app/jobs/unknown_user_fix_job.rb b/app/jobs/unknown_user_fix_job.rb new file mode 100644 index 00000000000..6fc412dee74 --- /dev/null +++ b/app/jobs/unknown_user_fix_job.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +class UnknownUserFixJob < CaseflowJob + ERROR_TEXT = "UnknownUser" + + def initialize + @stuck_job_report_service = StuckJobReportService.new + super + end + + def perform(date = "2023-08-07") + date = date.to_s + pattern = /^\d{4}-\d{2}-\d{2}$/ + if !date.match?(pattern) + fail ArgumentError, "Incorrect date format, use 'YYYY-mm-dd'" + end + + begin + parsed_date = Time.zone.parse(date) + rescue ArgumentError => error + log_error(error) + raise error + end + return if rius_with_errors.blank? + + @stuck_job_report_service.append_record_count(rius_with_errors.count, ERROR_TEXT) + rius_with_errors.each do |single_riu| + next if single_riu.created_at.nil? || single_riu.created_at > parsed_date + + @stuck_job_report_service.append_single_record(single_riu.class.name, single_riu.id) + + resolve_error_on_records(single_riu) + end + @stuck_job_report_service.append_record_count(rius_with_errors.count, ERROR_TEXT) + @stuck_job_report_service.write_log_report(ERROR_TEXT) + end + + # :reek:FeatureEnvy + def resolve_error_on_records(object_type) + object_type.clear_error! 
+ rescue StandardError => error + log_error(error) + @stuck_job_report_service.append_errors(object_type.class.name, object_type.id, error) + end + + def rius_with_errors + RequestIssuesUpdate.where("error ILIKE ?", "%#{ERROR_TEXT}%") + end +end diff --git a/app/jobs/update_appellant_representation_job.rb b/app/jobs/update_appellant_representation_job.rb index 081741c104b..36ee5b65857 100644 --- a/app/jobs/update_appellant_representation_job.rb +++ b/app/jobs/update_appellant_representation_job.rb @@ -7,7 +7,6 @@ class UpdateAppellantRepresentationJob < CaseflowJob include ActionView::Helpers::DateHelper queue_with_priority :low_priority application_attr :queue - APP_NAME = "caseflow_job" METRIC_GROUP_NAME = UpdateAppellantRepresentationJob.name.underscore TOTAL_NUMBER_OF_APPEALS_TO_UPDATE = 1000 diff --git a/app/models/appeal.rb b/app/models/appeal.rb index 4ea29b9773a..81572c4ca64 100644 --- a/app/models/appeal.rb +++ b/app/models/appeal.rb @@ -9,7 +9,6 @@ # rubocop:disable Metrics/ClassLength class Appeal < DecisionReview - include AppealConcern include BeaamAppealConcern include BgsService include Taskable @@ -62,16 +61,6 @@ class Appeal < DecisionReview :email_address, :country, to: :veteran, prefix: true - delegate :power_of_attorney, to: :claimant - delegate :representative_name, - :representative_type, - :representative_address, - :representative_email_address, - :poa_last_synced_at, - :update_cached_attributes!, - :save_with_updated_bgs_record!, - to: :power_of_attorney, allow_nil: true - enum stream_type: { Constants.AMA_STREAM_TYPES.original.to_sym => Constants.AMA_STREAM_TYPES.original, Constants.AMA_STREAM_TYPES.vacate.to_sym => Constants.AMA_STREAM_TYPES.vacate, @@ -173,9 +162,7 @@ class IssueAlreadyDuplicated < StandardError; end sm_claim.uuid = SecureRandom.uuid # make sure uuid doesn't exist in the database (by some chance) - while SupplementalClaim.find_by(uuid: sm_claim.uuid).nil? == false - sm_claim.uuid = SecureRandom.uuid - end + sm_claim.uuid = SecureRandom.uuid while SupplementalClaim.find_by(uuid: sm_claim.uuid).nil? == false end }) end @@ -184,10 +171,10 @@ def hearing_day_if_schedueled hearing_date = Hearing.find_by(appeal_id: id) if hearing_date.nil? - return nil + nil else - return hearing_date.hearing_day.scheduled_for + hearing_date.hearing_day.scheduled_for end end @@ -262,10 +249,13 @@ def contested_claim? category_substrings = %w[Contested Apportionment] request_issues.active.any? do |request_issue| - category_substrings.any? { |substring| self.request_issues.active.include?(request_issue) && request_issue.nonrating_issue_category&.include?(substring) } + category_substrings.any? do |substring| + request_issues.active.include?(request_issue) && request_issue.nonrating_issue_category&.include?(substring) + end end end + # :reek:RepeatedConditionals # decision issue status overrules request issues/special issue list for both mst and pact def mst? return false unless FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) @@ -278,6 +268,7 @@ def mst? special_issue_list.military_sexual_trauma) end + # :reek:RepeatedConditionals def pact? return false unless FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) @@ -312,6 +303,7 @@ def decorated_with_status AppealStatusApiDecorator.new(self) end + # :reek:RepeatedConditionals def active_request_issues_or_decision_issues decision_issues.empty? ? 
active_request_issues : fetch_all_decision_issues end @@ -383,6 +375,8 @@ def clone_cavc_remand(parent_appeal, user_css_id) dup_remand&.save end + # rubocop:disable Metrics/MethodLength, Metrics/AbcSize + # :reek:RepeatedConditionals # clone issues clones request_issues the user selected # and anydecision_issues/decision_request_issues tied to the request issue def clone_issues(parent_appeal, payload_params) @@ -437,6 +431,7 @@ def clone_issues(parent_appeal, payload_params) end end end + # rubocop:enable Metrics/MethodLength, Metrics/AbcSize def clone_aod(parent_appeal) # find the appeal AOD @@ -500,6 +495,7 @@ def clone_hearings(parent_appeal) end end + # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity def clone_task_tree(parent_appeal, user_css_id) # get the task tree from the parent parent_ordered_tasks = parent_appeal.tasks.order(:created_at) @@ -534,6 +530,7 @@ def clone_task_tree(parent_appeal, user_css_id) break if parent_appeal.tasks.count == tasks.count end end + # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # clone_task is used for splitting an appeal, tie to css_id for split def clone_task(original_task, user_css_id) @@ -789,10 +786,6 @@ def untimely_issues_report(new_date) issues_report end - def bgs_power_of_attorney - claimant&.is_a?(BgsRelatedClaimant) ? power_of_attorney : nil - end - # Note: Currently Caseflow only supports one claimant per decision review def power_of_attorneys claimants.map(&:power_of_attorney).compact diff --git a/app/models/batch_processes/batch_process.rb b/app/models/batch_processes/batch_process.rb new file mode 100644 index 00000000000..8423dad60c6 --- /dev/null +++ b/app/models/batch_processes/batch_process.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +class BatchProcess < CaseflowRecord + self.inheritance_column = :batch_type + has_many :priority_end_product_sync_queue, foreign_key: "batch_id", primary_key: "batch_id" + has_many :end_product_establishments, through: :priority_end_product_sync_queue + after_initialize :init_counters + + ERROR_LIMIT = ENV["BATCH_PROCESS_MAX_ERRORS_BEFORE_STUCK"].to_i + ERROR_DELAY = ENV["BATCH_PROCESS_ERROR_DELAY"].to_i + BATCH_LIMIT = ENV["BATCH_PROCESS_BATCH_LIMIT"].to_i + + scope :completed_batch_process_ids, -> { where(state: Constants.BATCH_PROCESS.completed).select(:batch_id) } + scope :needs_reprocessing, lambda { + where("created_at <= ? AND state <> ?", BatchProcess::ERROR_DELAY.hours.ago, Constants.BATCH_PROCESS.completed) + } + + enum state: { + Constants.BATCH_PROCESS.pre_processing.to_sym => Constants.BATCH_PROCESS.pre_processing, + Constants.BATCH_PROCESS.processing.to_sym => Constants.BATCH_PROCESS.processing, + Constants.BATCH_PROCESS.completed.to_sym => Constants.BATCH_PROCESS.completed + } + + class << self + # Purpose: A no-op method for overriding, intended to find records to batch from a Queue table + # + # Params: None + # + # Response: Records to Batch + def find_records_to_batch + # no-op, can be overwritten + end + + # Purpose: A no-op method for overriding, intended to create a Batch Process record and assign its batch_id + # to the records gathered by the find_records_to_batch method. 
+ # + # Params: Records retrieved from a Queue table that need to be assigned to a Batch Process + # + # Response: Newly Created Batch Process + # :reek:UnusedParameters + def create_batch!(_records) + # no-op, can be overwritten + end + end + + # Purpose: A no-op method for overriding, intended to process all records assinged to a Batch Process + # + # Params: None + # + # Response: Returns True if batch is processed successfully + def process_batch! + # no-op, can be overwritten + end + + private + + attr_accessor :completed_count, :failed_count + + # Initialize Counters + def init_counters + @completed_count = 0 + @failed_count = 0 + end + + def increment_completed + self.completed_count += 1 + end + + def increment_failed + self.failed_count += 1 + end + + # State update Methods + def batch_processing! + update!(state: Constants.BATCH_PROCESS.processing, started_at: Time.zone.now) + end + + def batch_complete! + update!(state: Constants.BATCH_PROCESS.completed, + records_failed: failed_count, + records_completed: completed_count, + ended_at: Time.zone.now) + end + + # When a record and error is sent to this method, it updates the record and checks to see + # if the record should be declared stuck. If the records should be stuck, it calls the + # declare_record_stuck method (Found in priority_end_product_sync_queue.rb). + # Otherwise, the record is updated with status: error and the error message is added to + # error_messages. + # + # As a general method, it's assumed the record has a batch_id and error_messages + # column within the associated table. + # :reek:FeatureEnvy + def error_out_record!(record, error) + increment_failed + error_array = record.error_messages || [] + error_array.push("Error: #{error.inspect} - Batch ID: #{record.batch_id} - Time: #{Time.zone.now}.") + + if error_array.length >= ERROR_LIMIT + record.declare_record_stuck! + else + record.status_error!(error_array) + end + + Rails.logger.error(error.inspect) + end +end diff --git a/app/models/batch_processes/priority_ep_sync_batch_process.rb b/app/models/batch_processes/priority_ep_sync_batch_process.rb new file mode 100644 index 00000000000..3222f7d2b66 --- /dev/null +++ b/app/models/batch_processes/priority_ep_sync_batch_process.rb @@ -0,0 +1,97 @@ +# frozen_string_literal: true + +class PriorityEpSyncBatchProcess < BatchProcess + class << self + # Purpose: Finds records to batch from the Priority End Product Sync Queue (PEPSQ) table that + # have NO batch_id OR have a batch_id tied to a COMPLETED Batch Process (BATCHABLE), + # do NOT have a status of SYNCED OR STUCK (SYNCABLE), + # and have a last_batched_at date/time that is NULL OR greater than the ERROR_DELAY (READY_TO_BATCH). + # + # Params: None + # + # Response: PEPSQ records + def find_records_to_batch + PriorityEndProductSyncQueue.batchable.syncable.ready_to_batch.batch_limit + end + + # Purpose: Creates a Batch Process record and assigns its batch_id + # to the PEPSQ records gathered by the find_records_to_batch method. + # + # Params: Records retrieved from the Priority End Product Sync Queue (PEPSQ) table + # + # Response: Newly Created Batch Process + def create_batch!(records) + new_batch = PriorityEpSyncBatchProcess.create!(batch_type: name, + state: Constants.BATCH_PROCESS.pre_processing, + records_attempted: records.count) + + new_batch.assign_batch_to_queued_records!(records) + new_batch + end + end + + # Purpose: Updates the Batch Process status to processing then loops through each record within + # the batch. 
Each record's status is updated to processing, then the #sync! method is attempted. + # If the record fails, the error_out_record! method is called. + # + # Params: None + # + # Response: Returns True if batch is processed successfully + # rubocop:disable Metrics/MethodLength + # :reek:FeatureEnvy + def process_batch! + batch_processing! + + priority_end_product_sync_queue.each do |record| + record.status_processing! + epe = record.end_product_establishment + + begin + epe.sync! + epe.reload + + if epe.vbms_ext_claim.nil? + fail Caseflow::Error::PriorityEndProductSyncError, "Claim ID: #{epe.reference_id} not In VBMS_EXT_CLAIM." + elsif epe.synced_status != epe.vbms_ext_claim&.level_status_code + fail Caseflow::Error::PriorityEndProductSyncError, "EPE ID: #{epe&.id}. EPE synced_status of"\ + " #{epe.synced_status} does not match the VBMS_EXT_CLAIM level_status_code of"\ + " #{epe.vbms_ext_claim&.level_status_code}." + end + rescue StandardError => error + error_out_record!(record, error) + next + end + + record.status_sync! + increment_completed + end + + batch_complete! + destroy_synced_records_from_queue! + end + # rubocop:enable Metrics/MethodLength + + # Purpose: Assigns the Batch Process batch_id to Priority End Product Sync Queue (PEPSQ) records. + # + # Params: Records retrieved from the Priority End Product Sync Queue (PEPSQ) table + # + # Response: Newly batched PEPSQ records + def assign_batch_to_queued_records!(records) + records.each do |pepsq_record| + pepsq_record.update!(batch_id: batch_id, + status: Constants.PRIORITY_EP_SYNC.pre_processing, + last_batched_at: Time.zone.now) + end + end + + private + + # Purpose: Destroys "SYNCED" PEPSQ records to limit the growing number of table records. + # + # Params: None + # + # Response: Log message stating newly destroyed PEPSQ records + def destroy_synced_records_from_queue! + PriorityEndProductSyncQueue.destroy_batch_process_pepsq_records!(self) + end +end diff --git a/app/models/case_timeline_instruction_set.rb b/app/models/case_timeline_instruction_set.rb new file mode 100644 index 00000000000..de4af8ff15b --- /dev/null +++ b/app/models/case_timeline_instruction_set.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +# :reek:TooManyInstanceVariables +class CaseTimelineInstructionSet + attr_reader :change_type, + :issue_category, + :benefit_type, + :original_mst, + :original_pact, + :edit_mst, + :edit_pact, + :mst_edit_reason, + :pact_edit_reason + + # rubocop:disable Metrics/ParameterLists + # :reek:LongParameterList and :reek:TooManyInstanceVariables + def initialize( + change_type:, + issue_category:, + benefit_type:, + original_mst:, + original_pact:, + edit_mst: nil, + edit_pact: nil, + mst_edit_reason: nil, + pact_edit_reason: nil + ) + @change_type = change_type + @issue_category = issue_category + @benefit_type = benefit_type + @original_mst = original_mst + @original_pact = original_pact + @edit_mst = edit_mst + @edit_pact = edit_pact + @mst_edit_reason = mst_edit_reason + @pact_edit_reason = pact_edit_reason + end + # rubocop:enable Metrics/ParameterLists +end diff --git a/app/models/caseflow_stuck_record.rb b/app/models/caseflow_stuck_record.rb new file mode 100644 index 00000000000..f17111c7a50 --- /dev/null +++ b/app/models/caseflow_stuck_record.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +# This table consists of records that have repeatedly attempted +# to sync or be processed in some way but have continuously errored out. 
+# This table is polymorphic, records on this table could belong to more than one table. +# Records on this table are intended to be checked and fixed manually. + +class CaseflowStuckRecord < CaseflowRecord + belongs_to :stuck_record, polymorphic: true + + # Custom model association that will return the end_product_establishment for + # stuck records that are from the PriorityEndProductSyncQueue + def end_product_establishment + if stuck_record.is_a?(PriorityEndProductSyncQueue) + stuck_record.end_product_establishment + end + end +end diff --git a/app/models/certification.rb b/app/models/certification.rb index b5d8b84a0f0..8abbd5cd3ba 100644 --- a/app/models/certification.rb +++ b/app/models/certification.rb @@ -108,12 +108,6 @@ def form8 @form8 ||= Form8.find_by(certification_id: id) end - def time_to_certify - return nil if !completed_at || !created_at - - completed_at - created_at - end - def self.completed where("completed_at IS NOT NULL") end @@ -127,30 +121,6 @@ def self.v2 .or(where.not(vacols_representative_name: nil)) end - def self.was_missing_doc - was_missing_nod.or(was_missing_soc) - .or(was_missing_ssoc) - .or(was_missing_form9) - end - - def self.was_missing_nod - # allow 30 second lag just in case 'nod_matching_at' timestamp is a few seconds - # greater than 'created_at' timestamp - where(nod_matching_at: nil).or(where("nod_matching_at > created_at + INTERVAL '30 seconds'")) - end - - def self.was_missing_soc - where(soc_matching_at: nil).or(where("soc_matching_at > created_at + INTERVAL '30 seconds'")) - end - - def self.was_missing_ssoc - ssoc_required.where(ssocs_matching_at: nil).or(where("ssocs_matching_at > created_at + INTERVAL '30 seconds'")) - end - - def self.was_missing_form9 - where(form9_matching_at: nil).or(where("form9_matching_at > created_at + INTERVAL '30 seconds'")) - end - def self.ssoc_required where(ssocs_required: true) end diff --git a/app/models/certification_stats.rb b/app/models/certification_stats.rb deleted file mode 100644 index 4ddca0f08b4..00000000000 --- a/app/models/certification_stats.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -## -# CertificationStats is an interface to quickly access statistics for Caseflow Certification -# it is responsible for aggregating and caching statistics. 
-# -class CertificationStats < Caseflow::Stats - # :nocov: - CALCULATIONS = { - certifications_started: lambda do |range| - Certification.where(created_at: range).count - end, - - certifications_completed: lambda do |range| - Certification.where(completed_at: range).count - end, - - same_period_completions: lambda do |range| - Certification.completed.where(created_at: range).count - end, - - missing_doc_same_period_completions: lambda do |range| - Certification.was_missing_doc.merge(Certification.completed).where(created_at: range).count - end, - - time_to_certify: lambda do |range| - CertificationStats.percentile(:time_to_certify, Certification.where(completed_at: range), 95) - end, - - missing_doc_time_to_certify: lambda do |range| - CertificationStats.percentile(:time_to_certify, Certification.was_missing_doc.where(created_at: range), 95) - end, - - median_time_to_certify: lambda do |range| - CertificationStats.percentile(:time_to_certify, Certification.where(completed_at: range), 50) - end, - - median_missing_doc_time_to_certify: lambda do |range| - CertificationStats.percentile(:time_to_certify, Certification.was_missing_doc.where(created_at: range), 50) - end, - - missing_doc: lambda do |range| - Certification.was_missing_doc.where(created_at: range).count - end, - - missing_nod: lambda do |range| - Certification.was_missing_nod.where(created_at: range).count - end, - - missing_soc: lambda do |range| - Certification.was_missing_soc.where(created_at: range).count - end, - - missing_ssoc: lambda do |range| - Certification.was_missing_ssoc.where(created_at: range).count - end, - - ssoc_required: lambda do |range| - Certification.ssoc_required.where(created_at: range).count - end, - - missing_form9: lambda do |range| - Certification.was_missing_form9.where(created_at: range).count - end - }.freeze - # :nocov: -end diff --git a/app/models/claim_review.rb b/app/models/claim_review.rb index b86e6143c7a..b56e7cd4a56 100644 --- a/app/models/claim_review.rb +++ b/app/models/claim_review.rb @@ -98,8 +98,37 @@ def add_user_to_business_line! business_line.add_user(RequestStore.store[:current_user]) end + def handle_issues_with_no_decision_date! + # Guard clause to only perform this update for VHA claim reviews for now + return nil if benefit_type != "vha" + + if request_issues_without_decision_dates? + review_task = tasks.find { |task| task.is_a?(DecisionReviewTask) } + review_task&.on_hold! + elsif !request_issues_without_decision_dates? + review_task = tasks.find { |task| task.is_a?(DecisionReviewTask) } + review_task&.assigned! + end + end + + def request_issues_without_decision_dates? + request_issues.active.any? { |issue| issue.decision_date.blank? } + end + def create_business_line_tasks! create_decision_review_task! if processed_in_caseflow? + + tasks.reload + + handle_issues_with_no_decision_date! + end + + def redirect_url + if benefit_type == "vha" && request_issues_without_decision_dates? + "#{business_line.tasks_url}?tab=incomplete" + else + business_line.tasks_url + end end # Idempotent method to create all the artifacts for this claim. 
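A hedged usage sketch of the VHA no-decision-date handling added above; the claim review, task status strings, and dates below are illustrative assumptions rather than values taken from this change.

# Hypothetical VHA claim review with one active request issue missing a decision date.
claim_review = HigherLevelReview.find_by(benefit_type: "vha")
review_task  = claim_review.tasks.find { |task| task.is_a?(DecisionReviewTask) }

claim_review.handle_issues_with_no_decision_date!
review_task.reload.status  # => "on_hold" while request_issues_without_decision_dates? is true
claim_review.redirect_url  # => "/decision_reviews/vha?tab=incomplete"

# Supplying the missing date re-runs the handler via RequestIssue#save_decision_date!.
claim_review.request_issues.active.each do |issue|
  issue.save_decision_date!(Time.zone.yesterday) if issue.decision_date.blank?
end
review_task.reload.status  # => "assigned"
claim_review.redirect_url  # => "/decision_reviews/vha"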
diff --git a/app/models/concerns/case_review_concern.rb b/app/models/concerns/case_review_concern.rb index 2fe7b38bff9..7d522d8495e 100644 --- a/app/models/concerns/case_review_concern.rb +++ b/app/models/concerns/case_review_concern.rb @@ -23,7 +23,7 @@ def appeal def associate_with_appeal # Populate appeal_* column values based on original implementation that uses `task_id` - update_attributes( + update( appeal_id: appeal_through_task_id&.id, appeal_type: appeal_through_task_id&.class&.name ) diff --git a/app/models/concerns/has_business_line.rb b/app/models/concerns/has_business_line.rb index 7679cc047df..cd179323fe7 100644 --- a/app/models/concerns/has_business_line.rb +++ b/app/models/concerns/has_business_line.rb @@ -5,7 +5,11 @@ module HasBusinessLine def business_line business_line_name = Constants::BENEFIT_TYPES[benefit_type] - @business_line ||= BusinessLine.find_or_create_by(name: business_line_name) { |org| org.url = benefit_type } + @business_line ||= if benefit_type == "vha" + VhaBusinessLine.singleton + else + BusinessLine.find_or_create_by(name: business_line_name) { |org| org.url = benefit_type } + end end def processed_in_vbms? diff --git a/app/models/concerns/has_unrecognized_party_detail.rb b/app/models/concerns/has_unrecognized_party_detail.rb index 1fef8a14ff5..5e04855f053 100644 --- a/app/models/concerns/has_unrecognized_party_detail.rb +++ b/app/models/concerns/has_unrecognized_party_detail.rb @@ -8,7 +8,7 @@ module HasUnrecognizedPartyDetail extend ActiveSupport::Concern included do - delegate :name, :first_name, :middle_name, :last_name, :suffix, :ssn, + delegate :name, :first_name, :middle_name, :last_name, :suffix, :ein, :ssn, :address, :address_line_1, :address_line_2, :address_line_3, :city, :state, :zip, :country, :date_of_birth, :phone_number, :email_address, :party_type, diff --git a/app/models/concerns/issue_updater.rb b/app/models/concerns/issue_updater.rb index ff7e6b94afe..3b5347083be 100644 --- a/app/models/concerns/issue_updater.rb +++ b/app/models/concerns/issue_updater.rb @@ -1,5 +1,6 @@ # frozen_string_literal: true +# rubocop:disable Metrics/ModuleLength module IssueUpdater extend ActiveSupport::Concern @@ -35,6 +36,8 @@ def update_issue_dispositions_in_vacols! private + # rubocop:disable Metrics/MethodLength, Metrics/CyclomaticComplexity, Metrics/AbcSize + # :reek:FeatureEnvy def create_decision_issues! ordered_issues = issues.sort_by { |issue| issue[:request_issue_ids]&.first } ordered_issues.each do |issue_attrs| @@ -54,7 +57,6 @@ def create_decision_issues! ) request_issues.each do |request_issue| - RequestDecisionIssue.create!(decision_issue: decision_issue, request_issue: request_issue) # compare the MST/PACT status of the orignial issue and decision to create task and record @@ -69,6 +71,7 @@ def create_decision_issues! create_remand_reasons(decision_issue, issue_attrs[:remand_reasons] || []) end end + # rubocop:enable Metrics/MethodLength, Metrics/CyclomaticComplexity, Metrics/AbcSize def fail_if_not_all_request_issues_have_decision! unless appeal.every_request_issue_has_decision? 
@@ -122,6 +125,8 @@ def create_remand_reasons(decision_issue, remand_reasons_attrs) end end + # rubocop:disable Metrics/AbcSize, Metrics/MethodLength + # :reek:FeatureEnvy def create_issue_update_task(original_issue, decision_issue) root_task = RootTask.find_or_create_by!(appeal: appeal) @@ -139,15 +144,22 @@ def create_issue_update_task(original_issue, decision_issue) completed_by: RequestStore[:current_user] ) - task.format_instructions( - "Edited Issue", - task_text_helper([original_issue.contested_issue_description, original_issue.nonrating_issue_category, original_issue.nonrating_issue_description]), - task_text_benefit_type(original_issue), - original_issue.mst_status, - original_issue.pact_status, - decision_issue.mst_status, - decision_issue.pact_status + set = CaseTimelineInstructionSet.new( + change_type: "Edited Issue", + issue_category: task_text_helper( + [ + original_issue.contested_issue_description, + original_issue.nonrating_issue_category, + original_issue.nonrating_issue_description + ] + ), + benefit_type: task_text_benefit_type(original_issue), + original_mst: original_issue.mst_status, + original_pact: original_issue.pact_status, + edit_mst: decision_issue.mst_status, + edit_pact: decision_issue.pact_status ) + task.format_instructions(set) task.completed! @@ -169,6 +181,7 @@ def create_issue_update_task(original_issue, decision_issue) decision_issue_id: decision_issue.id ) end + # rubocop:enable Metrics/AbcSize, Metrics/MethodLength def task_text_benefit_type(issue) issue.benefit_type ? issue.benefit_type.capitalize : "" @@ -184,3 +197,4 @@ def task_text_helper(text_array) end end end +# rubocop:enable Metrics/ModuleLength diff --git a/app/models/concerns/sync_lock.rb b/app/models/concerns/sync_lock.rb new file mode 100644 index 00000000000..5a7cefac4e3 --- /dev/null +++ b/app/models/concerns/sync_lock.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require "redis" + +module SyncLock + extend ActiveSupport::Concern + LOCK_TIMEOUT = ENV["SYNC_LOCK_MAX_DURATION"] + + # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity + def hlr_sync_lock + if decision_review.is_a?(HigherLevelReview) && block_given? + redis = Redis.new(url: Rails.application.secrets.redis_url_cache) + lock_key = "hlr_sync_lock:#{end_product_establishment.id}" + + begin + # create the sync lock with a key, value pair only IF it doesn't already exist + # and give it an expiration time upon creation. + sync_lock_acquired = redis.set(lock_key, "lock is set", nx: true, ex: LOCK_TIMEOUT.to_i) + Rails.logger.info(lock_key + " has been created") if sync_lock_acquired + + fail Caseflow::Error::SyncLockFailed, message: Time.zone.now.to_s unless sync_lock_acquired + + yield + ensure + # Delete the lock upon exiting if it was created during this session + redis.del(lock_key) if sync_lock_acquired + # if lock was acquired and is later unretrievable, then it was deleted/expired + if !redis.get(lock_key) && sync_lock_acquired + Rails.logger.info(lock_key + " has been released") + end + end + elsif block_given? 
+ yield + end + end + # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity +end diff --git a/app/models/concerns/task_extension_for_hearings.rb b/app/models/concerns/task_extension_for_hearings.rb index ccbfd69c5a8..0cb19c66680 100644 --- a/app/models/concerns/task_extension_for_hearings.rb +++ b/app/models/concerns/task_extension_for_hearings.rb @@ -110,4 +110,19 @@ def withdraw_hearing(parent) ) end end + + # Purpose: When a hearing is postponed through the completion of a NoShowHearingTask, AssignHearingDispositionTask, + # or ChangeHearingDispositionTask, cancel any open HearingPostponementRequestMailTasks associated with the + # appeal, as they have become redundant. + def cancel_redundant_hearing_postponement_req_tasks + open_hearing_postponement_requests.each { |task| task.cancel_when_redundant(self, updated_at) } + end + + # Purpose: Finds open HearingPostponementRequestMailTasks (assigned to HearingAdmin and not MailTeam) in task tree + def open_hearing_postponement_requests + appeal.tasks.where( + type: HearingPostponementRequestMailTask.name, + assigned_to: HearingAdmin.singleton + )&.open + end end diff --git a/app/models/contestable_issue.rb b/app/models/contestable_issue.rb index f3af9d212ed..cf456ad3703 100644 --- a/app/models/contestable_issue.rb +++ b/app/models/contestable_issue.rb @@ -15,7 +15,6 @@ class ContestableIssue class << self def from_rating_issue(rating_issue, contesting_decision_review) - # epe = EndProductEstablishment.find_by(reference_id: rating_issue.reference_id) new( rating_issue_reference_id: rating_issue.reference_id, rating_issue_profile_date: rating_issue.profile_date.to_date, @@ -32,7 +31,7 @@ def from_rating_issue(rating_issue, contesting_decision_review) # account for that. source_request_issues: rating_issue.source_request_issues, source_decision_review: rating_issue.source_request_issues.first&.decision_review, - special_issues: rating_issue.special_issues + special_issues: SpecialIssuesComparator.new(rating_issue).special_issues ) end @@ -50,7 +49,7 @@ def from_decision_issue(decision_issue, contesting_decision_review) source_request_issues: decision_issue.request_issues.active, source_decision_review: source, contesting_decision_review: contesting_decision_review, - is_rating: decision_issue.rating?, + is_rating: decision_issue.rating? 
) end @@ -63,8 +62,8 @@ def from_rating_decision(rating_decision, contesting_decision_review) description: rating_decision.decision_text, contesting_decision_review: contesting_decision_review, rating_issue_diagnostic_code: rating_decision.diagnostic_code, - special_issues: rating_decision.special_issues, - is_rating: true, # true even if rating_reference_id is nil + special_issues: SpecialIssuesComparator.new(rating_decision).special_issues, + is_rating: true # true even if rating_reference_id is nil ) end end diff --git a/app/models/decision_review.rb b/app/models/decision_review.rb index 27b3af72e04..f7f7d155f75 100644 --- a/app/models/decision_review.rb +++ b/app/models/decision_review.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true class DecisionReview < CaseflowRecord + include AppealConcern include CachedAttributes include Asyncable @@ -16,6 +17,16 @@ class DecisionReview < CaseflowRecord has_many :request_issues_updates, as: :review, dependent: :destroy has_one :intake, as: :detail + delegate :power_of_attorney, to: :claimant, allow_nil: true + delegate :representative_name, + :representative_type, + :representative_address, + :representative_email_address, + :poa_last_synced_at, + :update_cached_attributes!, + :save_with_updated_bgs_record!, + to: :power_of_attorney, allow_nil: true + cache_attribute :cached_serialized_ratings, cache_key: :ratings_cache_key, expires_in: 1.day do ratings_with_issues_or_decisions.map(&:serialize) end @@ -91,6 +102,10 @@ def ama_activation_date end end + def bgs_power_of_attorney + claimant&.is_a?(BgsRelatedClaimant) ? power_of_attorney : nil + end + def serialized_ratings return unless receipt_date return unless can_contest_rating_issues? @@ -389,7 +404,7 @@ def request_issues_ui_hash private def contestable_issue_generator - @contestable_issue_generator ||= ContestableIssueGenerator.new(self, get_special_issues: true) + @contestable_issue_generator ||= ContestableIssueGenerator.new(self) end def can_contest_rating_issues? diff --git a/app/models/dispatch_stats.rb b/app/models/dispatch_stats.rb deleted file mode 100644 index 67ee33ec657..00000000000 --- a/app/models/dispatch_stats.rb +++ /dev/null @@ -1,124 +0,0 @@ -# frozen_string_literal: true - -class DispatchStats < Caseflow::Stats - # since this is a heavy calculation, only run this at most once an hour - THROTTLE_RECALCULATION_PERIOD = 1.hour - - class << self - def throttled_calculate_all! 
- return if last_calculated_at && last_calculated_at > THROTTLE_RECALCULATION_PERIOD.ago - - calculate_all!(clear_cache: true) - Rails.cache.write(cache_key, Time.zone.now.to_i) - end - - private - - def last_calculated_at - return @last_calculated_timestamp if @last_calculated_timestamp - - timestamp = Rails.cache.read(cache_key) - timestamp && Time.zone.at(timestamp.to_i) - end - - def cache_key - "#{name}-last-calculated-timestamp" - end - end - - CALCULATIONS = { - establish_claim_identified: lambda do |range| - EstablishClaim.where(created_at: range).count - end, - - establish_claim_identified_full_grant: lambda do |range| - EstablishClaim.where(created_at: range).for_full_grant.count - end, - - establish_claim_identified_partial_grant_remand: lambda do |range| - EstablishClaim.where(created_at: range).for_partial_grant_or_remand.count - end, - - establish_claim_active_users: lambda do |range| - EstablishClaim.where(completed_at: range).pluck(:user_id).uniq.count - end, - - establish_claim_started: lambda do |range| - EstablishClaim.where(started_at: range).count - end, - - establish_claim_completed: lambda do |range| - EstablishClaim.where(completed_at: range).count - end, - - establish_claim_full_grant_completed: lambda do |range| - EstablishClaim.where(completed_at: range).for_full_grant.count - end, - - establish_claim_partial_grant_remand_completed: lambda do |range| - EstablishClaim.where(completed_at: range).for_partial_grant_or_remand.count - end, - - establish_claim_canceled: lambda do |range| - EstablishClaim.where(completed_at: range).canceled.count - end, - - establish_claim_canceled_full_grant: lambda do |range| - EstablishClaim.where(completed_at: range).canceled.for_full_grant.count - end, - - establish_claim_canceled_partial_grant_remand: lambda do |range| - EstablishClaim.where(completed_at: range).canceled.for_partial_grant_or_remand.count - end, - - establish_claim_completed_success: lambda do |range| - EstablishClaim.where(completed_at: range).completed_success.count - end, - - establish_claim_completed_success_full_grant: lambda do |range| - EstablishClaim.where(completed_at: range).completed_success.for_full_grant.count - end, - - establish_claim_completed_success_partial_grant_remand: lambda do |range| - EstablishClaim.where(completed_at: range).completed_success.for_partial_grant_or_remand.count - end, - - establish_claim_prepared: lambda do |range| - EstablishClaim.where(prepared_at: range).count - end, - - establish_claim_prepared_full_grant: lambda do |range| - EstablishClaim.where(prepared_at: range).for_full_grant.count - end, - - establish_claim_prepared_partial_grant_remand: lambda do |range| - EstablishClaim.where(prepared_at: range).for_partial_grant_or_remand.count - end, - - time_to_establish_claim: lambda do |range| - DispatchStats.percentile(:time_to_complete, EstablishClaim.where(completed_at: range), 95) - end, - - median_time_to_establish_claim: lambda do |range| - DispatchStats.percentile(:time_to_complete, EstablishClaim.where(completed_at: range), 50) - end, - - time_to_establish_claim_full_grants: lambda do |range| - DispatchStats.percentile(:time_to_complete, EstablishClaim.where(completed_at: range).for_full_grant, 95) - end, - - median_time_to_establish_claim_full_grants: lambda do |range| - DispatchStats.percentile(:time_to_complete, EstablishClaim.where(completed_at: range).for_full_grant, 50) - end, - - time_to_establish_claim_partial_grants_remands: lambda do |range| - DispatchStats.percentile(:time_to_complete, 
EstablishClaim.where(completed_at: range) - .for_partial_grant_or_remand, 95) - end, - - median_time_to_establish_claim_partial_grants_remands: lambda do |range| - DispatchStats.percentile(:time_to_complete, EstablishClaim.where(completed_at: range) - .for_partial_grant_or_remand, 50) - end - }.freeze -end diff --git a/app/models/end_product_establishment.rb b/app/models/end_product_establishment.rb index bab3e8c5cdf..33cac326d2f 100644 --- a/app/models/end_product_establishment.rb +++ b/app/models/end_product_establishment.rb @@ -9,12 +9,27 @@ # the current status of the EP when the EndProductEstablishment is synced. class EndProductEstablishment < CaseflowRecord + # Using macro-style definition. The locking scope will be TheClass + # method and only one method can run at any given time. + include RedisMutex::Macro + belongs_to :source, polymorphic: true belongs_to :user has_many :request_issues has_many :end_product_code_updates has_many :effectuations, class_name: "BoardGrantEffectuation" has_many :end_product_updates + has_one :priority_end_product_sync_queue + belongs_to :vbms_ext_claim, foreign_key: "reference_id", primary_key: "claim_id", optional: true + + # :block => 1 # Specify in seconds how long you want to wait for the lock to be released. + # # Specify 0 if you need non-blocking sematics and return false immediately. (default: 1) + # :sleep => 0.1 # Specify in seconds how long the polling interval should be when :block is given. + # # It is NOT recommended to go below 0.01. (default: 0.1) + # :expire => 10 # Specify in seconds when the lock should be considered stale when something went wrong + # # with the one who held the lock and failed to unlock. (default: 10) + # auto_mutex :sync!, block: 60, expire: 100, after_failure: lambda { Rails.logger.error('failed to acquire lock! + # EPE sync is being called by another process. Please try again later.') } # allow @veteran to be assigned to save upstream calls attr_writer :veteran @@ -35,7 +50,7 @@ class ContentionNotFound < StandardError; end class << self def order_by_sync_priority - active.order("last_synced_at IS NOT NULL, last_synced_at ASC") + active.order(Arel.sql("last_synced_at IS NOT NULL, last_synced_at ASC")) end def established @@ -46,7 +61,7 @@ def active # We only know the set of inactive EP statuses # We also only know the EP status after fetching it from BGS # Therefore, our definition of active is when the EP is either - # not known or not known to be inactive + # not known or not known to be inactive established.where("synced_status NOT IN (?) OR synced_status IS NULL", EndProduct::INACTIVE_STATUSES) end end @@ -197,35 +212,45 @@ def cancel_unused_end_product! end end + # rubocop:disable Metrics/MethodLength def sync! - # There is no need to sync end_product_status if the status - # is already inactive since an EP can never leave that state - return true unless status_active? - - fail EstablishedEndProductNotFound, id unless result - - # load contentions now, in case "source" needs them. - # this VBMS call is slow and will cause the transaction below - # to timeout in some cases. - contentions unless result.status_type_code == EndProduct::STATUSES.key("Canceled") + RedisMutex.with_lock("EndProductEstablishment:#{id}", block: 60, expire: 100) do + # key => "EndProductEstablishment:id" + # There is no need to sync end_product_status if the status + # is already inactive since an EP can never leave that state + return true unless status_active? 
+ + fail EstablishedEndProductNotFound, id unless result + + # load contentions now, in case "source" needs them. + # this VBMS call is slow and will cause the transaction below to timeout in some cases. + contentions unless result.status_type_code == EndProduct::STATUSES.key("Canceled") + + transaction do + update!(synced_status: result.status_type_code) + status_cancelled? ? handle_cancelled_ep! : sync_source! + close_request_issues_with_no_decision! + end - transaction do - update!( - synced_status: result.status_type_code, - last_synced_at: Time.zone.now - ) - status_cancelled? ? handle_cancelled_ep! : sync_source! - close_request_issues_with_no_decision! + save_updated_end_product_code! end - - save_updated_end_product_code! + rescue RedisMutex::LockError + Rails.logger.error("Failed to acquire lock for EPE ID: #{id}! #sync! is being"\ + " called by another process. Please try again later.") rescue EstablishedEndProductNotFound, AppealRepository::AppealNotValidToReopen => error raise error rescue StandardError => error Raven.extra_context(end_product_establishment_id: id) raise error + ensure + # Always update last_synced_at to ensure that SyncReviewsJob does not immediately re-enqueue + # End Product Establishments that fail to sync with BGS into the EndProductSyncJob. + # This will allow for other End Product Establishments to sync first before re-attempting. + update!(last_synced_at: Time.zone.now) end + # rubocop:enable Metrics/MethodLength + def fetch_dispositions_from_vbms VBMSService.get_dispositions!(claim_id: reference_id) end @@ -290,6 +315,15 @@ def associated_rating @associated_rating ||= fetch_associated_rating end + # Purpose: Check if End Product Establishment is enqueued in the Priority End Product Sync Queue. + # + # Params: NONE + # + # Response: True if End Product Establishment is queued to sync. False if not. + def priority_queued? + priority_end_product_sync_queue ? true : false + end + def sync_decision_issues! contention_records.each do |record| if record.respond_to?(:nonrating?) && record.nonrating? 
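A minimal sketch of the RedisMutex pattern that #sync! now relies on (the redis-mutex gem added in the Gemfile); the key format, wait, and expiry mirror the call above, while the record id and guarded work are stand-ins.

epe_id = 42 # hypothetical EndProductEstablishment id
begin
  # Wait up to 60 seconds for the per-record lock and treat it as stale after 100 seconds.
  RedisMutex.with_lock("EndProductEstablishment:#{epe_id}", block: 60, expire: 100) do
    # ... work that must not run concurrently for the same record, e.g. the BGS status sync ...
  end
rescue RedisMutex::LockError
  # Another process holds the lock; log and let SyncReviewsJob pick the record up again later.
  Rails.logger.error("Failed to acquire lock for EPE ID: #{epe_id}! #sync! is being called by another process.")
end

Scoping the key to a single establishment id lets different records sync in parallel while serializing concurrent calls for the same one, unlike the commented-out auto_mutex macro, which would have locked the method for the whole class.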
diff --git a/app/models/external_models/vbms_ext_claim.rb b/app/models/external_models/vbms_ext_claim.rb new file mode 100644 index 00000000000..5f430e96fe5 --- /dev/null +++ b/app/models/external_models/vbms_ext_claim.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +# This model represents entries in the vbms_ext_claim table +# VbmsExtClaims can have an associated EndProductEstablishment + +class VbmsExtClaim < CaseflowRecord + self.table_name = "vbms_ext_claim" + self.primary_key = "CLAIM_ID" + + has_one :end_product_establishment, foreign_key: "reference_id", primary_key: "claim_id" + + alias_attribute :claim_id, :CLAIM_ID + alias_attribute :claim_date, :CLAIM_DATE + alias_attribute :ep_code, :EP_CODE + alias_attribute :suspense_date, :SUSPENSE_DATE + alias_attribute :suspense_reason_code, :SUSPENSE_REASON_CODE + alias_attribute :suspense_reason_comments, :SUSPENSE_REASON_COMMENTS + alias_attribute :claimant_person_id, :CLAIMANT_PERSON_ID + alias_attribute :contention_count, :CONTENTION_COUNT + alias_attribute :claim_soj, :CLAIM_SOJ + alias_attribute :temporary_claim_soj, :TEMPORARY_CLAIM_SOJ + alias_attribute :priority, :PRIORITY + alias_attribute :type_code, :TYPE_CODE + alias_attribute :lifecycle_status_name, :LIFECYCLE_STATUS_NAME + alias_attribute :level_status_code, :LEVEL_STATUS_CODE + alias_attribute :submitter_application_code, :SUBMITTER_APPLICATION_CODE + alias_attribute :submitter_role_code, :SUBMITTER_ROLE_CODE + alias_attribute :veteran_person_id, :VETERAN_PERSON_ID + alias_attribute :establishment_date, :ESTABLISHMENT_DATE + alias_attribute :intake_site, :INTAKE_SITE + alias_attribute :payee_code, :PAYEE_CODE + alias_attribute :sync_id, :SYNC_ID + alias_attribute :createddt, :CREATEDDT + alias_attribute :lastupdatedt, :LASTUPDATEDT + alias_attribute :expirationdt, :EXPIRATIONDT + alias_attribute :version, :VERSION + alias_attribute :lifecycle_status_change_date, :LIFECYCLE_STATUS_CHANGE_DATE + alias_attribute :rating_soj, :RATING_SOJ + alias_attribute :program_type_code, :PROGRAM_TYPE_CODE + alias_attribute :service_type_code, :SERVICE_TYPE_CODE + alias_attribute :prevent_audit_trig, :PREVENT_AUDIT_TRIG + alias_attribute :pre_discharge_type_code, :PRE_DISCHARGE_TYPE_CODE + alias_attribute :pre_discharge_ind, :PRE_DISCHARGE_IND + alias_attribute :organization_name, :ORGANIZATION_NAME + alias_attribute :organization_soj, :ORGANIZATION_SOJ + alias_attribute :allow_poa_access, :ALLOW_POA_ACCESS + alias_attribute :poa_code, :POA_CODE +end diff --git a/app/models/legacy_appeal.rb b/app/models/legacy_appeal.rb index c2a36ba1cf8..10f572dd328 100644 --- a/app/models/legacy_appeal.rb +++ b/app/models/legacy_appeal.rb @@ -932,14 +932,15 @@ def claimant_participant_id veteran_is_not_claimant ? person_for_appellant&.participant_id : veteran&.participant_id end + # :reek:FeatureEnvy def hearing_day_if_schedueled hearing_date = Hearing.find_by(appeal_id: id) if hearing_date.nil? - return nil + nil else - return hearing_date.hearing_day.scheduled_for + hearing_date.hearing_day.scheduled_for end end @@ -947,9 +948,11 @@ def ui_hash Intake::LegacyAppealSerializer.new(self).serializable_hash[:data][:attributes] end + # rubocop:disable Naming/PredicateName def is_legacy? 
true end + # rubocop:enable Naming/PredicateName private diff --git a/app/models/legacy_hearing.rb b/app/models/legacy_hearing.rb index db640a7be0a..370ce6aa900 100644 --- a/app/models/legacy_hearing.rb +++ b/app/models/legacy_hearing.rb @@ -268,11 +268,12 @@ def original_request_type end end + # :reek:FeatureEnvy def prepare_worksheet_issues worksheet_issues = [] appeal.worksheet_issues.each_with_index do |wi, idx| worksheet_issues.push(wi.attributes) - issue = appeal.issues.find { |i| i.vacols_sequence_id.to_i == wi[:vacols_sequence_id].to_i } + issue = appeal.issues.find { |iss| iss.vacols_sequence_id.to_i == wi[:vacols_sequence_id].to_i } worksheet_issues[idx][:mst_status] = issue&.mst_status worksheet_issues[idx][:pact_status] = issue&.pact_status end diff --git a/app/models/membership_request.rb b/app/models/membership_request.rb index eacafc14aa2..7f0b91f7de8 100644 --- a/app/models/membership_request.rb +++ b/app/models/membership_request.rb @@ -75,9 +75,9 @@ def requesting_vha_predocket_access? def check_request_for_automatic_addition_to_vha_businessline(deciding_user) if requesting_vha_predocket_access? - vha_business_line = BusinessLine.find_by(url: "vha") + vha_business_line = VhaBusinessLine.singleton - # If the requestor also has an outstanding membership request to the vha_businessline approve it + # If the requestor also has an outstanding membership request to the vha_business_line approve it # Also send an approval email vha_business_line_request = requestor.membership_requests.assigned.find_by(organization: vha_business_line) vha_business_line_request&.update_status_and_send_email("approved", deciding_user, "VHA") diff --git a/app/models/metric.rb b/app/models/metric.rb new file mode 100644 index 00000000000..4fb6cfc4f4d --- /dev/null +++ b/app/models/metric.rb @@ -0,0 +1,103 @@ +# frozen_string_literal: true + +class Metric < CaseflowRecord + belongs_to :user + delegate :css_id, to: :user + + METRIC_TYPES = { error: "error", log: "log", performance: "performance", info: "info" }.freeze + LOG_SYSTEMS = { datadog: "datadog", rails_console: "rails_console", javascript_console: "javascript_console" }.freeze + PRODUCT_TYPES = { + queue: "queue", + hearings: "hearings", + intake: "intake", + vha: "vha", + efolder: "efolder", + reader: "reader", + caseflow: "caseflow", # Default product + # Added below because MetricService has usages of this as a service + vacols: "vacols", + bgs: "bgs", + gov_delivery: "gov_delivery", + mpi: "mpi", + pexip: "pexip", + va_dot_gov: "va_dot_gov", + va_notify: "va_notify", + vbms: "vbms" + }.freeze + APP_NAMES = { caseflow: "caseflow", efolder: "efolder" }.freeze + METRIC_GROUPS = { service: "service" }.freeze + + validates :metric_type, inclusion: { in: METRIC_TYPES.values } + validates :metric_product, inclusion: { in: PRODUCT_TYPES.values } + validates :metric_group, inclusion: { in: METRIC_GROUPS.values } + validates :app_name, inclusion: { in: APP_NAMES.values } + validate :sent_to_in_log_systems + + def self.create_metric(klass, params, user) + create(default_object(klass, params, user)) + end + + def self.create_metric_from_rest(klass, params, user) + params[:metric_attributes] = JSON.parse(params[:metric_attributes]) if params[:metric_attributes] + params[:additional_info] = JSON.parse(params[:additional_info]) if params[:additional_info] + params[:sent_to_info] = JSON.parse(params[:sent_to_info]) if params[:sent_to_info] + params[:relevant_tables_info] = JSON.parse(params[:relevant_tables_info]) if params[:relevant_tables_info] + + 
create(default_object(klass, params, user)) + end + + def sent_to_in_log_systems + invalid_systems = sent_to - LOG_SYSTEMS.values + msg = "contains invalid log systems. The following are valid log systems #{LOG_SYSTEMS.values}" + errors.add(:sent_to, msg) if !invalid_systems.empty? + end + + # Returns an object with defaults set if below symbols are not found in params default object. + # Looks for these symbols in params parameter + # - uuid + # - name + # - group + # - message + # - type + # - product + # - app_name + # - metric_attributes + # - additional_info + # - sent_to + # - sent_to_info + # - relevant_tables_info + # - start + # - end + # - duration + + # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity + # :reek:ControlParameter + def self.default_object(klass, params, user) + { + uuid: params[:uuid], + user: user || RequestStore.store[:current_user] || User.system_user, + metric_name: params[:name] || METRIC_TYPES[:log], + metric_class: klass&.try(:name) || klass&.class&.name || name, + metric_group: params[:group] || METRIC_GROUPS[:service], + metric_message: params[:message] || METRIC_TYPES[:log], + metric_type: params[:type] || METRIC_TYPES[:log], + metric_product: PRODUCT_TYPES[params[:product].to_sym] || PRODUCT_TYPES[:caseflow], + app_name: params[:app_name] || APP_NAMES[:caseflow], + metric_attributes: params[:metric_attributes], + additional_info: params[:additional_info], + sent_to: Array(params[:sent_to]).flatten, + sent_to_info: params[:sent_to_info], + relevant_tables_info: params[:relevant_tables_info], + start: params[:start], + end: params[:end], + duration: calculate_duration(params[:start], params[:end], params[:duration]) + } + end + # rubocop:enable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity + + def self.calculate_duration(start, end_time, duration) + return duration if duration || !start || !end_time + + end_time - start + end +end diff --git a/app/models/organizations/business_line.rb b/app/models/organizations/business_line.rb index 78c5020fdb6..cc3a4f66ff0 100644 --- a/app/models/organizations/business_line.rb +++ b/app/models/organizations/business_line.rb @@ -5,11 +5,30 @@ def tasks_url "/decision_reviews/#{url}" end + def included_tabs + [:in_progress, :completed] + end + + def tasks_query_type + { + in_progress: "open", + completed: "recently_completed" + } + end + # Example Params: # sort_order: 'desc', # sort_by: 'assigned_at', # filters: [], # search_query: 'Bob' + def incomplete_tasks(pagination_params = {}) + QueryBuilder.new( + query_type: :incomplete, + query_params: pagination_params, + parent: self + ).build_query + end + def in_progress_tasks(pagination_params = {}) QueryBuilder.new( query_type: :in_progress, @@ -26,6 +45,14 @@ def completed_tasks(pagination_params = {}) ).build_query end + def incomplete_tasks_type_counts + QueryBuilder.new(query_type: :incomplete, parent: self).task_type_count + end + + def incomplete_tasks_issue_type_counts + QueryBuilder.new(query_type: :incomplete, parent: self).issue_type_count + end + def in_progress_tasks_type_counts QueryBuilder.new(query_type: :in_progress, parent: self).task_type_count end @@ -56,12 +83,10 @@ class QueryBuilder .and(Task.arel_table[:type].eq(DecisionReviewTask.name)) }.freeze - TASKS_QUERY_TYPE = { - in_progress: "open", - completed: "recently_completed" - }.freeze - DEFAULT_ORDERING_HASH = { + incomplete: { + sort_by: :assigned_at + }, in_progress: { sort_by: :assigned_at }, @@ -96,56 +121,38 @@ def 
task_type_count end # rubocop:disable Metrics/MethodLength + # rubocop:disable Metrics/AbcSize def issue_type_count - query_type_predicate = if query_type == :in_progress - "AND tasks.status IN ('assigned', 'in_progress', 'on_hold') - AND request_issues.closed_at IS NULL - AND request_issues.ineligible_reason IS NULL" - else - "AND tasks.status = 'completed' - AND #{Task.arel_table[:closed_at].between(7.days.ago..Time.zone.now).to_sql}" - end + shared_select_statement = "tasks.id as tasks_id, request_issues.nonrating_issue_category as issue_category" + appeals_query = Task.send(parent.tasks_query_type[query_type]) + .select(shared_select_statement) + .joins(ama_appeal: :request_issues) + .where(query_constraints) + hlr_query = Task.send(parent.tasks_query_type[query_type]) + .select(shared_select_statement) + .joins(supplemental_claim: :request_issues) + .where(query_constraints) + sc_query = Task.send(parent.tasks_query_type[query_type]) + .select(shared_select_statement) + .joins(higher_level_review: :request_issues) + .where(query_constraints) nonrating_issue_count = ActiveRecord::Base.connection.execute <<-SQL WITH task_review_issues AS ( - SELECT tasks.id as task_id, request_issues.nonrating_issue_category as issue_category - FROM tasks - INNER JOIN higher_level_reviews ON tasks.appeal_id = higher_level_reviews.id - AND tasks.appeal_type = 'HigherLevelReview' - INNER JOIN request_issues ON higher_level_reviews.id = request_issues.decision_review_id - AND request_issues.decision_review_type = 'HigherLevelReview' - WHERE request_issues.nonrating_issue_category IS NOT NULL - AND tasks.assigned_to_id = #{business_line_id} - AND tasks.assigned_to_type = 'Organization' - #{query_type_predicate} - UNION ALL - SELECT tasks.id as task_id, request_issues.nonrating_issue_category as issue_category - FROM tasks - INNER JOIN supplemental_claims ON tasks.appeal_id = supplemental_claims.id - AND tasks.appeal_type = 'SupplementalClaim' - INNER JOIN request_issues ON supplemental_claims.id = request_issues.decision_review_id - AND request_issues.decision_review_type = 'SupplementalClaim' - WHERE tasks.assigned_to_id = #{business_line_id} - AND tasks.assigned_to_type = 'Organization' - #{query_type_predicate} - UNION ALL - SELECT tasks.id as task_id, request_issues.nonrating_issue_category as issue_category - FROM tasks - INNER JOIN appeals ON tasks.appeal_id = appeals.id - AND tasks.appeal_type = 'Appeal' - INNER JOIN request_issues ON appeals.id = request_issues.decision_review_id - AND request_issues.decision_review_type = 'Appeal' - WHERE tasks.assigned_to_id = #{business_line_id} - AND tasks.assigned_to_type = 'Organization' - #{query_type_predicate} + #{hlr_query.to_sql} + UNION ALL + #{sc_query.to_sql} + UNION ALL + #{appeals_query.to_sql} ) - SELECT issue_category, COUNT(issue_category) AS nonrating_issue_count + SELECT issue_category, COUNT(1) AS nonrating_issue_count FROM task_review_issues GROUP BY issue_category; SQL issue_count_options = nonrating_issue_count.reduce({}) do |acc, hash| - acc.merge(hash["issue_category"] => hash["nonrating_issue_count"]) + key = hash["issue_category"] || "None" + acc.merge(key => hash["nonrating_issue_count"]) end # Merge in all of the possible issue types for businessline. 
Guess that the key is the snakecase url @@ -158,6 +165,7 @@ def issue_type_count issue_count_options end # rubocop:enable Metrics/MethodLength + # rubocop:enable Metrics/AbcSize private @@ -177,7 +185,8 @@ def union_select_statements participant_id_alias, veteran_ssn_alias, issue_types, - issue_types_lower + issue_types_lower, + appeal_unique_id_alias ] end @@ -226,6 +235,10 @@ def participant_id_alias "veterans.participant_id as veteran_participant_id" end + def appeal_unique_id_alias + "uuid as external_appeal_id" + end + # All join clauses # NOTE: .left_joins(ama_appeal: :request_issues) @@ -262,6 +275,17 @@ def bgs_attorneys_join "LEFT JOIN bgs_attorneys ON claimants.participant_id = bgs_attorneys.participant_id" end + def union_query_join_clauses + [ + veterans_join, + claimants_join, + people_join, + unrecognized_appellants_join, + party_details_join, + bgs_attorneys_join + ] + end + # These values reflect the number of searchable fields in search_all_clause for where interpolation later def number_of_search_fields FeatureToggle.enabled?(:decision_review_queue_ssn_column, user: RequestStore[:current_user]) ? 4 : 2 @@ -286,7 +310,7 @@ def search_all_clause end def group_by_columns - "tasks.id, veterans.participant_id, veterans.ssn, veterans.first_name, veterans.last_name, "\ + "tasks.id, uuid, veterans.participant_id, veterans.ssn, veterans.first_name, veterans.last_name, "\ "unrecognized_party_details.name, unrecognized_party_details.last_name, people.first_name, people.last_name, "\ "veteran_is_not_claimant, bgs_attorneys.name" end @@ -329,14 +353,9 @@ def ama_appeals_query def decision_reviews_on_request_issues(join_constraint, where_constraints = query_constraints) Task.select(union_select_statements) - .send(TASKS_QUERY_TYPE[query_type]) + .send(parent.tasks_query_type[query_type]) .joins(join_constraint) - .joins(veterans_join) - .joins(claimants_join) - .joins(people_join) - .joins(unrecognized_appellants_join) - .joins(party_details_join) - .joins(bgs_attorneys_join) + .joins(*union_query_join_clauses) .where(where_constraints) .where(search_all_clause, *search_values) .where(issue_type_filter_predicate(query_params[:filters])) @@ -358,21 +377,27 @@ def combined_decision_review_tasks_query def query_constraints { + incomplete: { + # Don't retrieve any tasks with closed issues or issues with ineligible reasons for incomplete + assigned_to: parent, + "request_issues.closed_at": nil, + "request_issues.ineligible_reason": nil + }, in_progress: { # Don't retrieve any tasks with closed issues or issues with ineligible reasons for in progress - assigned_to: business_line_id, + assigned_to: parent, "request_issues.closed_at": nil, "request_issues.ineligible_reason": nil }, completed: { - assigned_to: business_line_id + assigned_to: parent } }[query_type] end def board_grant_effectuation_task_constraints { - assigned_to: business_line_id, + assigned_to: parent, 'tasks.type': BoardGrantEffectuationTask.name } end @@ -443,9 +468,12 @@ def issue_type_filter_predicate(filters) def build_issue_type_filter_predicates(tasks_to_include) first_task_name, *remaining_task_names = tasks_to_include + first_task_name = nil if first_task_name == "None" + filter = RequestIssue.arel_table[:nonrating_issue_category].eq(first_task_name) remaining_task_names.each do |task_name| + task_name = nil if task_name == "None" filter = filter.or(RequestIssue.arel_table[:nonrating_issue_category].eq(task_name)) end @@ -455,7 +483,7 @@ def build_issue_type_filter_predicates(tasks_to_include) end def 
decision_review_requests_union_subquery(filter) - base_query = Task.select("tasks.id").send(TASKS_QUERY_TYPE[query_type]) + base_query = Task.select("tasks.id").send(parent.tasks_query_type[query_type]) union_query = Arel::Nodes::UnionAll.new( Arel::Nodes::UnionAll.new( base_query @@ -482,3 +510,5 @@ def locate_issue_type_filter(filters) end end end + +require_dependency "vha_business_line" diff --git a/app/models/organizations/vha_business_line.rb b/app/models/organizations/vha_business_line.rb new file mode 100644 index 00000000000..af4b9a98a2f --- /dev/null +++ b/app/models/organizations/vha_business_line.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class VhaBusinessLine < BusinessLine + def self.singleton + VhaBusinessLine.first || VhaBusinessLine.find_or_create_by(name: Constants::BENEFIT_TYPES["vha"], url: "vha") + end + + def included_tabs + [:incomplete, :in_progress, :completed] + end + + def tasks_query_type + { + incomplete: "on_hold", + in_progress: "active", + completed: "recently_completed" + } + end +end diff --git a/app/models/other_claimant.rb b/app/models/other_claimant.rb index 79cbee4b0d1..b0ccd46bf6a 100644 --- a/app/models/other_claimant.rb +++ b/app/models/other_claimant.rb @@ -6,7 +6,7 @@ # Currently used for attorney fee cases when the attorney isn't found in the BGS attorney database. class OtherClaimant < Claimant - delegate :name, :first_name, :middle_name, :last_name, :suffix, :ssn, + delegate :name, :first_name, :middle_name, :last_name, :suffix, :ein, :ssn, :address, :address_line_1, :address_line_2, :address_line_3, :city, :state, :zip, :country, :date_of_birth, :email_address, :phone_number, diff --git a/app/models/priority_queues/priority_end_product_sync_queue.rb b/app/models/priority_queues/priority_end_product_sync_queue.rb new file mode 100644 index 00000000000..b15259373db --- /dev/null +++ b/app/models/priority_queues/priority_end_product_sync_queue.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +# Model for Priority End Product Sync Queue table. +# This table consists of records of End Product Establishment IDs that need to be synced with VBMS. 
+ +# These are populated via the trigger that is created on creation of the vbms_ext_claim table +# The trigger is located in: +# db/scripts/external/create_vbms_ext_claim_table.rb +# db/scripts/ +class PriorityEndProductSyncQueue < CaseflowRecord + self.table_name = "priority_end_product_sync_queue" + + belongs_to :end_product_establishment + belongs_to :batch_process, foreign_key: "batch_id", primary_key: "batch_id" + has_many :caseflow_stuck_records, as: :stuck_record + + scope :batchable, -> { where(batch_id: [nil, BatchProcess.completed_batch_process_ids]) } + scope :ready_to_batch, lambda { + where("last_batched_at IS NULL OR last_batched_at <= ?", BatchProcess::ERROR_DELAY.hours.ago) + } + scope :batch_limit, -> { limit(BatchProcess::BATCH_LIMIT) } + scope :syncable, lambda { + where.not(status: [Constants.PRIORITY_EP_SYNC.synced, Constants.PRIORITY_EP_SYNC.stuck]) + } + + enum status: { + Constants.PRIORITY_EP_SYNC.not_processed.to_sym => Constants.PRIORITY_EP_SYNC.not_processed, + Constants.PRIORITY_EP_SYNC.pre_processing.to_sym => Constants.PRIORITY_EP_SYNC.pre_processing, + Constants.PRIORITY_EP_SYNC.processing.to_sym => Constants.PRIORITY_EP_SYNC.processing, + Constants.PRIORITY_EP_SYNC.synced.to_sym => Constants.PRIORITY_EP_SYNC.synced, + Constants.PRIORITY_EP_SYNC.error.to_sym => Constants.PRIORITY_EP_SYNC.error, + Constants.PRIORITY_EP_SYNC.stuck.to_sym => Constants.PRIORITY_EP_SYNC.stuck + } + + # Status Update methods + def status_processing! + update!(status: Constants.PRIORITY_EP_SYNC.processing) + end + + def status_sync! + update!(status: Constants.PRIORITY_EP_SYNC.synced) + end + + def status_error!(errors) + update!(status: Constants.PRIORITY_EP_SYNC.error, + error_messages: errors) + end + + # Method will update the status of the record to STUCK + # While also create a record within the caseflow_stuck_records table + # for later manual review. + def declare_record_stuck! + update!(status: Constants.PRIORITY_EP_SYNC.stuck) + CaseflowStuckRecord.create!(stuck_record: self, + error_messages: error_messages, + determined_stuck_at: Time.zone.now) + end + + # Purpose: Destroys "SYNCED" PEPSQ records to limit the growing number of table records. 
+ # + # Params: The batch process the synced records belong to + # + # Response: Log message stating newly destroyed PEPSQ records + def self.destroy_batch_process_pepsq_records!(batch_process) + synced_records = batch_process.priority_end_product_sync_queue.where(status: Constants.PRIORITY_EP_SYNC.synced) + log_text = "PriorityEpSyncBatchProcessJob #{synced_records.size} synced records deleted:"\ + " #{synced_records.map(&:id)} Time: #{Time.zone.now}" + synced_records.delete_all + Rails.logger.info(log_text) + end +end diff --git a/app/models/promulgated_rating.rb b/app/models/promulgated_rating.rb index 4b1c90b0994..cea50950a9b 100644 --- a/app/models/promulgated_rating.rb +++ b/app/models/promulgated_rating.rb @@ -69,15 +69,7 @@ def fetch_rating_profile ) rescue Savon::Error {} - rescue BGS::ShareError => error - Raven.capture_exception(error) - - DataDogService.increment_counter( - metric_group: "errors", - metric_name: "rating_fetch_retries", - app_name: RequestStore[:application] - ) - + rescue BGS::ShareError retry_fetching_rating_profile end diff --git a/app/models/rating.rb b/app/models/rating.rb index 0c37254e591..f743e2f99ad 100644 --- a/app/models/rating.rb +++ b/app/models/rating.rb @@ -8,21 +8,6 @@ class Rating ONE_YEAR_PLUS_DAYS = 372.days TWO_LIFETIMES = 250.years - MST_SPECIAL_ISSUES = ["sexual assault trauma", "sexual trauma/assault", "sexual harassment"].freeze - PACT_SPECIAL_ISSUES = [ - "agent orange - outside vietnam or unknown", - "agent orange - vietnam", - "amyotrophic lateral sclerosis (als)", - "burn pit exposure", - "environmental hazard in gulf war", - "gulf war presumptive", - "radiation" - ].freeze - CONTENTION_PACT_ISSUES = [ - "pact", - "pactdicre", - "pees1" - ].freeze class NilRatingProfileListError < StandardError def ignorable? @@ -47,10 +32,6 @@ def fetch_in_range(*) fail Caseflow::Error::MustImplementInSubclass end - def fetch_contentions_by_participant_id(participant_id) - BGSService.new.find_contentions_by_participant_id(participant_id) - end - def sorted_ratings_from_bgs_response(response:, start_date:) unsorted = ratings_from_bgs_response(response) unpromulgated = unsorted.select { |rating| rating.promulgation_date.nil? } @@ -68,96 +49,6 @@ def fetch_promulgated(participant_id) def from_bgs_hash(_data) fail Caseflow::Error::MustImplementInSubclass end - - def special_issue_has_mst?(special_issue) - if special_issue[:spis_tn]&.casecmp("ptsd - personal trauma")&.zero? - return MST_SPECIAL_ISSUES.include?(special_issue[:spis_basis_tn]&.downcase) - end - - if special_issue[:spis_tn]&.casecmp("non-ptsd personal trauma")&.zero? - MST_SPECIAL_ISSUES.include?(special_issue[:spis_basis_tn]&.downcase) - end - end - - def special_issue_has_pact?(special_issue) - if special_issue[:spis_tn]&.casecmp("gulf war presumptive 3.320")&.zero? - return special_issue[:spis_basis_tn]&.casecmp("particulate matter")&.zero? - end - - PACT_SPECIAL_ISSUES.include?(special_issue[:spis_tn]&.downcase) - end - - def mst_from_contentions_for_rating?(contentions) - return false if contentions.blank? - - contentions.any? { |contention| mst_contention_status?(contention) } - end - - def pact_from_contentions_for_rating?(contentions) - return false if contentions.blank? - - contentions.any? 
{ |contention| pact_contention_status?(contention) } - end - - def participant_contentions(serialized_hash) - # guard for MST/PACT feature toggle - # commented out for testing - # return [] unless FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || - # FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) - - contentions_data = [] - response = fetch_contentions_by_participant_id(serialized_hash[:participant_id]) - - return if response.blank? - - serialized_hash[:rba_contentions_data].each do |rba| - rba_contention = rba.with_indifferent_access - response.each do |resp| - next unless resp.is_a?(Hash) - - # if only one contention, check the contention info - if resp.dig(:contentions).is_a?(Hash) - # get the single contention from the response - cntn = resp.dig(:contentions) - - next if cntn.blank? - - # see if the contetion ties to the rating - contentions_data << cntn if cntn.dig(:cntntn_id) == rba_contention.dig(:cntntn_id) - - # if the response contains an array of contentions, unpack each one and compare - elsif resp.dig(:contentions).is_a?(Array) - - resp.dig(:contentions).each do |contention| - next if contention.dig(:cntntn_id).blank? - - contentions_data << contention if contention.dig(:cntntn_id) == rba_contention.dig(:cntntn_id) - end - end - end - end - contentions_data.compact - end - - def mst_contention_status?(bgs_contention) - return false if bgs_contention.nil? || bgs_contention[:special_issues].blank? - - if bgs_contention[:special_issues].is_a?(Hash) - bgs_contention[:special_issues][:spis_tc] == "MST" - elsif bgs_contention[:special_issues].is_a?(Array) - bgs_contention[:special_issues].any? { |issue| issue[:spis_tc] == "MST" } - end - end - - def pact_contention_status?(bgs_contention) - return false if bgs_contention.nil? || bgs_contention[:special_issues].blank? - - if bgs_contention[:special_issues].is_a?(Hash) - CONTENTION_PACT_ISSUES.include?(bgs_contention[:special_issues][:spis_tc]&.downcase) - elsif bgs_contention[:special_issues].is_a?(Array) - bgs_contention[:special_issues].any? { |issue| CONTENTION_PACT_ISSUES.include?(issue[:spis_tc]&.downcase) } - end - end end # WARNING: profile_date is a misnomer adopted from BGS terminology. @@ -194,43 +85,10 @@ def decisions most_recent_disability_hash_for_issue = map_of_dis_sn_to_most_recent_disability_hash[disability[:dis_sn]] special_issues = most_recent_disability_hash_for_issue&.special_issues disability[:special_issues] = special_issues if special_issues - disability[:rba_contentions_data] = rba_contentions_data(disability) - RatingDecision.from_bgs_disability(self, disability) end end - def rba_contentions_data(disability) - rating_issues.each do |issue| - next unless issue[:dis_sn] == disability[:dis_sn] - - return ensure_array_of_hashes(issue[:rba_issue_contentions]) - end - end - - def veteran - @veteran ||= Veteran.find_by(participant_id: participant_id) - end - - def rating_issues - return [] unless veteran - - veteran.ratings.map { |rating| Array.wrap(rating.rating_profile[:rating_issues]) }.compact.flatten - - # return empty list when there are no ratings - rescue PromulgatedRating::BackfilledRatingError - # Ignore PromulgatedRating::BackfilledRatingErrors since they are a regular occurrence and we don't need to take - # any action when we see them. 
- [] - rescue PromulgatedRating::LockedRatingError => error - Raven.capture_exception(error) - [] - end - - def ensure_array_of_hashes(array_or_hash_or_nil) - [array_or_hash_or_nil || {}].flatten.map(&:deep_symbolize_keys) - end - def associated_end_products associated_claims_data.map do |claim_data| EndProduct.new( diff --git a/app/models/rating_decision.rb b/app/models/rating_decision.rb index 3308982934b..2cce62f309c 100644 --- a/app/models/rating_decision.rb +++ b/app/models/rating_decision.rb @@ -60,41 +60,8 @@ def from_bgs_disability(rating, disability) # rubocop:enable Metrics/MethodLength def deserialize(hash) - DataDogService.increment_counter( - metric_group: "mst_pact_group", - metric_name: "bgs_service.previous_service_call.rating_decision", - app_name: RequestStore[:application] - ) new(hash) end - - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity - def deserialize_special_issues(serialized_hash) - data = [] - if serialized_hash[:special_issues].present? - filtered_special_issues = serialized_hash[:special_issues].map do |special_issue| - special_issue.with_indifferent_access if special_issue.with_indifferent_access[:dis_sn] == serialized_hash[:disability_id] # rubocop:disable Layout/LineLength - end.compact - - filtered_special_issues.each do |special_issue| - data << { mst_available: true } if Rating.special_issue_has_mst?(special_issue) - - data << { pact_available: true } if Rating.special_issue_has_pact?(special_issue) - end - end - - if serialized_hash[:rba_contentions_data] - # get the contentions from the rating by the participant id - contentions = Rating.participant_contentions(serialized_hash) - data << { mst_available: true } if Rating.mst_from_contentions_for_rating?(contentions) - - data << { pact_available: true } if Rating.pact_from_contentions_for_rating?(contentions) - end - data - end - # rubocop:enable Metrics/PerceivedComplexity - # rubocop:enable Metrics/CyclomaticComplexity end def decision_text diff --git a/app/models/rating_issue.rb b/app/models/rating_issue.rb index 872bf791bd6..382c4c37a1f 100644 --- a/app/models/rating_issue.rb +++ b/app/models/rating_issue.rb @@ -37,28 +37,6 @@ class RatingIssue # app/serializers/intake/rating_issue_serializer.rb (used in RatingIssue#serialize) class << self - def deserialize_special_issues(serialized_hash) - # guard for MST/PACT feature toggle - return [] unless FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || - FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) - - data = [] - serialized_hash[:special_issues]&.each do |special_issue| - data << { mst_available: true } if Rating.special_issue_has_mst?(special_issue) - - data << { pact_available: true } if Rating.special_issue_has_pact?(special_issue) - end - - if serialized_hash[:rba_contentions_data] - # get the contentinons from the rating by the participant id - contentions = Rating.participant_contentions(serialized_hash) - data << { mst_available: true } if Rating.mst_from_contentions_for_rating?(contentions) - - data << { pact_available: true } if Rating.pact_from_contentions_for_rating?(contentions) - end - data - end - def from_bgs_hash(rating, bgs_data) new( associated_end_products: rating.associated_end_products, @@ -93,7 +71,8 @@ def deserialize(serialized_hash) :promulgation_date, :rba_contentions_data, :reference_id, - :subject_text + :subject_text, + :special_issues ).merge(associated_end_products: deserialize_end_products(serialized_hash)) ) 
end diff --git a/app/models/remand_reason.rb b/app/models/remand_reason.rb index 2899f0d392c..0f1b4d6811d 100644 --- a/app/models/remand_reason.rb +++ b/app/models/remand_reason.rb @@ -1,7 +1,13 @@ # frozen_string_literal: true class RemandReason < CaseflowRecord + validates :post_aoj, inclusion: { in: [true, false] }, unless: :additional_remand_reasons_enabled? validates :code, inclusion: { in: Constants::AMA_REMAND_REASONS_BY_ID.values.map(&:keys).flatten } - validates :post_aoj, inclusion: { in: [true, false] } belongs_to :decision_issue + + private + + def additional_remand_reasons_enabled? + FeatureToggle.enabled?(:additional_remand_reasons) + end end diff --git a/app/models/request_issue.rb b/app/models/request_issue.rb index a4fa8d29f58..f860d486ff5 100644 --- a/app/models/request_issue.rb +++ b/app/models/request_issue.rb @@ -11,6 +11,7 @@ class RequestIssue < CaseflowRecord include HasBusinessLine include DecisionSyncable include HasDecisionReviewUpdatedSince + include SyncLock # how many days before we give up trying to sync decisions REQUIRES_PROCESSING_WINDOW_DAYS = 30 @@ -74,6 +75,13 @@ class RequestIssue < CaseflowRecord exclude_association :decision_review_id exclude_association :request_decision_issues end + + class DecisionDateInFutureError < StandardError + def initialize(request_issue_id) + super("Request Issue #{request_issue_id} cannot edit issue decision date " \ + "due to decision date being in the future") + end + end class ErrorCreatingDecisionIssue < StandardError def initialize(request_issue_id) super("Request Issue #{request_issue_id} cannot create decision issue " \ @@ -177,7 +185,7 @@ def from_intake_data(data, decision_review: nil) private - # rubocop:disable Metrics/MethodLength + # rubocop:disable Metrics/MethodLength, Metrics/AbcSize def attributes_from_intake_data(data) contested_issue_present = attributes_look_like_contested_issue?(data) issue_text = (data[:is_unidentified] || data[:verified_unidentified_issue]) ? data[:decision_text] : nil @@ -215,7 +223,7 @@ def attributes_from_intake_data(data) pact_status_update_reason_notes: data[:pact_status_update_reason_notes] } end - # rubocop:enable Metrics/MethodLength + # rubocop:enable Metrics/MethodLength, Metrics/AbcSize def attributes_look_like_contested_issue?(data) data[:rating_issue_reference_id] || @@ -254,11 +262,12 @@ def status_active? def mst_contention_status? return false if bgs_contention.nil? + if bgs_contention.special_issues.is_a?(Hash) - return bgs_contention.special_issues[:spis_tc] == 'MST' if bgs_contention&.special_issues + return bgs_contention.special_issues[:spis_tc] == "MST" if bgs_contention&.special_issues elsif bgs_contention.special_issues.is_a?(Array) bgs_contention.special_issues.each do |issue| - return true if issue[:spis_tc] == 'MST' + return true if issue[:spis_tc] == "MST" end end false @@ -266,11 +275,14 @@ def mst_contention_status? def pact_contention_status? return false if bgs_contention.nil? 
+
     if bgs_contention.special_issues.is_a?(Hash)
-      return ["PACT", "PACTDICRE", "PEES1"].include?(bgs_contention.special_issues[:spis_tc]) if bgs_contention&.special_issues
+      if bgs_contention&.special_issues
+        return %w[PACT PACTDICRE PEES1].include?(bgs_contention.special_issues[:spis_tc])
+      end
     elsif bgs_contention.special_issues.is_a?(Array)
       bgs_contention.special_issues.each do |issue|
-        return true if ["PACT", "PACTDICRE", "PEES1"].include?(issue[:spis_tc])
+        return true if %w[PACT PACTDICRE PEES1].include?(issue[:spis_tc])
       end
     end
     false
@@ -461,13 +473,21 @@ def sync_decision_issues!
     # to avoid a slow BGS call causing the transaction to timeout
     end_product_establishment.veteran

-    transaction do
-      return unless create_decision_issues
-
-      end_product_establishment.on_decision_issue_sync_processed(self)
-      clear_error!
-      close_decided_issue!
-      processed!
+    ### hlr_sync_lock will stop any other request issues associated with the current End Product Establishment
+    ### from syncing with BGS concurrently if the claim is a Higher Level Review. This will ensure that
+    ### the remand supplemental claim generation that occurs within '#on_decision_issue_sync_processed' will
+    ### not be inadvertently bypassed due to two request issues from the same claim being synced at the same
+    ### time. If this situation does occur, one of the request issues will error out with
+    ### Caseflow::Error::SyncLockFailed and be picked up to sync again later
+    hlr_sync_lock do
+      transaction do
+        return unless create_decision_issues
+
+        end_product_establishment.on_decision_issue_sync_processed(self)
+        clear_error!
+        close_decided_issue!
+        processed!
+      end
     end
   end

@@ -489,6 +509,10 @@ def close!(status:, closed_at_value: Time.zone.now)
     transaction do
       update!(closed_at: closed_at_value, closed_status: status)
+
+      # Special handling for claim reviews that contain issues without a decision date
+      decision_review.try(:handle_issues_with_no_decision_date!)
+
       yield if block_given?
     end
   end
@@ -519,12 +543,22 @@ def save_edited_contention_text!(new_description)
     update!(edited_description: new_description, contention_updated_at: nil)
   end

+  def save_decision_date!(new_decision_date)
+    fail DecisionDateInFutureError, id if new_decision_date.to_date > Time.zone.today
+
+    update!(decision_date: new_decision_date)
+
+    # Special handling for claim reviews that contain issues without a decision date
+    decision_review.try(:handle_issues_with_no_decision_date!)
+  end
+
   def remove!
     close!(status: :removed) do
       legacy_issue_optin&.flag_for_rollback!

       # If the decision issue is not associated with any other request issue, also delete
       decision_issues.each(&:soft_delete_on_removed_request_issue)
+      # Removing a request issue also deletes the associated request_decision_issue
       request_decision_issues.update_all(deleted_at: Time.zone.now)

       canceled! if submitted_not_processed?
diff --git a/app/models/request_issues_update.rb b/app/models/request_issues_update.rb
index 527ec14b792..2e9cedeed31 100644
--- a/app/models/request_issues_update.rb
+++ b/app/models/request_issues_update.rb
@@ -16,6 +16,7 @@ class RequestIssuesUpdate < CaseflowRecord
   delegate :withdrawn_issues, to: :withdrawal
   delegate :corrected_issues, :correction_issues, to: :correction

+  # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
   def perform!
     return false unless validate_before_perform
     return false if processed?
@@ -32,8 +33,10 @@ def perform!
pact_edited_request_issue_ids: pact_edited_issues.map(&:id), corrected_request_issue_ids: corrected_issues.map(&:id) ) - create_mst_pact_issue_update_tasks if FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || - FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) + if FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || + FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) + create_mst_pact_issue_update_tasks + end create_business_line_tasks! if added_issues.present? cancel_active_tasks submit_for_processing! @@ -43,6 +46,7 @@ def perform! true end + # rubocop:enable Metrics/MethodLength, Metrics/AbcSize def process_job if run_async? @@ -103,7 +107,8 @@ def pact_edited_issues end def all_updated_issues - added_issues + removed_issues + withdrawn_issues + edited_issues + correction_issues + mst_edited_issues + pact_edited_issues + added_issues + removed_issues + withdrawn_issues + edited_issues + + correction_issues + mst_edited_issues + pact_edited_issues end private @@ -142,7 +147,14 @@ def calculate_pact_edited_issues def edited_issue_data return [] unless @request_issues_data - @request_issues_data.select { |ri| ri[:edited_description].present? && ri[:request_issue_id] } + @request_issues_data.select do |ri| + edited_issue?(ri) + end + end + + def edited_issue?(request_issue) + (request_issue[:edited_description].present? || request_issue[:edited_decision_date].present?) && + request_issue[:request_issue_id] end def mst_edited_issue_data @@ -240,12 +252,25 @@ def process_edited_issues! return if edited_issues.empty? edited_issue_data.each do |edited_issue| - RequestIssue.find( - edited_issue[:request_issue_id].to_s - ).save_edited_contention_text!(edited_issue[:edited_description]) + request_issue = RequestIssue.find(edited_issue[:request_issue_id].to_s) + edit_contention_text(edited_issue, request_issue) + edit_decision_date(edited_issue, request_issue) + end + end + + def edit_contention_text(edited_issue_params, request_issue) + if edited_issue_params[:edited_description] + request_issue.save_edited_contention_text!(edited_issue_params[:edited_description]) + end + end + + def edit_decision_date(edited_issue_params, request_issue) + if edited_issue_params[:edited_decision_date] + request_issue.save_decision_date!(edited_issue_params[:edited_decision_date]) end end + # :reek:FeatureEnvy def process_mst_edited_issues! return if mst_edited_issues.empty? @@ -258,6 +283,7 @@ def process_mst_edited_issues! end end + # :reek:FeatureEnvy def process_pact_edited_issues! return if pact_edited_issues.empty? 
@@ -298,9 +324,9 @@ def handle_mst_pact_edits_task after_issues = fetch_after_issues edited_issues = before_issues & after_issues # cycle each edited issue (before) and compare MST/PACT with (fetch_after_issues) - # reverse_each to make the issues on the case timeline appear in similar sequence to what user sees the edit issues page + # reverse_each to make the issues on the case timeline appear in UI in a similar sequence to the edit issues page edited_issues.reverse_each do |before_issue| - after_issue = after_issues.find { |i| i.id == before_issue.id } + after_issue = after_issues.find { |issue| issue.id == before_issue.id } # if before/after has a change in MST/PACT, create issue update task if (before_issue.mst_status != after_issue.mst_status) || (before_issue.pact_status != after_issue.pact_status) create_issue_update_task("Edited Issue", before_issue, after_issue) @@ -312,7 +338,7 @@ def handle_added_mst_pact_edits_task after_issues = fetch_after_issues added_issues = after_issues - before_issues added_issues.reverse_each do |issue| - if (issue.mst_status) || (issue.pact_status) + if issue.mst_status || issue.pact_status create_issue_update_task("Added Issue", issue) end end @@ -331,6 +357,8 @@ def handle_mst_pact_removal_task end end + # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity + # :reek:FeatureEnvy def create_issue_update_task(change_type, before_issue, after_issue = nil) transaction do # close out any tasks that might be open @@ -348,19 +376,28 @@ def create_issue_update_task(change_type, before_issue, after_issue = nil) ) # check if change from vbms mst/pact status - vbms_mst_edit = before_issue.vbms_mst_status.nil? ? false : !before_issue.vbms_mst_status && before_issue.mst_status - vbms_pact_edit = before_issue.vbms_pact_status.nil? ? false : !before_issue.vbms_pact_status && before_issue.pact_status + vbms_mst_edit = if before_issue.vbms_mst_status.nil? + false + else + !before_issue.vbms_mst_status && before_issue.mst_status + end + + vbms_pact_edit = if before_issue.vbms_pact_status.nil? + false + else + !before_issue.vbms_pact_status && before_issue.pact_status + end # if a new issue is added and VBMS was edited, reference the original status if change_type == "Added Issue" && (vbms_mst_edit || vbms_pact_edit) - task.format_instructions( - change_type, - before_issue.contested_issue_description, - before_issue.benefit_type&.capitalize, - before_issue.vbms_mst_status, - before_issue.vbms_pact_status, - before_issue.mst_status, - before_issue.pact_status + set = CaseTimelineInstructionSet.new( + change_type: change_type, + issue_category: before_issue.contested_issue_description, + benefit_type: before_issue.benefit_type&.capitalize, + original_mst: before_issue.vbms_mst_status, + original_pact: before_issue.vbms_pact_status, + edit_mst: before_issue.mst_status, + edit_pact: before_issue.pact_status ) else # format the task instructions and close out @@ -368,17 +405,18 @@ def create_issue_update_task(change_type, before_issue, after_issue = nil) # rubocop:disable Layout/LineLength issue_description = "#{before_issue.nonrating_issue_category} - #{before_issue.nonrating_issue_description}" unless before_issue.nonrating_issue_category.nil? issue_description = before_issue.contested_issue_description if issue_description.nil? 
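+          # The pre-edit issue supplies the original MST/PACT values; the post-edit issue (when present)
+          # supplies the edited values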
- task.format_instructions( - change_type, - issue_description, - before_issue.benefit_type&.capitalize, - before_issue.mst_status, - before_issue.pact_status, - after_issue&.mst_status, - after_issue&.pact_status + set = CaseTimelineInstructionSet.new( + change_type: change_type, + issue_category: issue_description, + benefit_type: before_issue.benefit_type&.capitalize, + original_mst: before_issue.mst_status, + original_pact: before_issue.pact_status, + edit_mst: after_issue&.mst_status, + edit_pact: after_issue&.pact_status ) end - # rubocop:enable Layout/LineLength + task.format_instructions(set) + # rubocop:enable Layout/LineLength, Metrics/AbcSize task.completed! # create SpecialIssueChange record to log the changes @@ -400,4 +438,5 @@ def create_issue_update_task(change_type, before_issue, after_issue = nil) ) end end + # rubocop:enable Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity end diff --git a/app/models/serializers/work_queue/decision_review_task_serializer.rb b/app/models/serializers/work_queue/decision_review_task_serializer.rb index e275252b4fe..7455091447b 100644 --- a/app/models/serializers/work_queue/decision_review_task_serializer.rb +++ b/app/models/serializers/work_queue/decision_review_task_serializer.rb @@ -34,6 +34,10 @@ def self.request_issues(object) decision_review(object).request_issues end + def self.power_of_attorney(object) + decision_review(object)&.power_of_attorney + end + def self.issue_count(object) object[:issue_count] || request_issues(object).active_or_ineligible.size end @@ -42,6 +46,14 @@ def self.veteran(object) decision_review(object).veteran end + def self.representative_tz(object) + decision_review(object)&.representative_tz + end + + attribute :has_poa do |object| + decision_review(object).claimant&.power_of_attorney.present? + end + attribute :claimant do |object| { name: claimant_name(object), @@ -55,15 +67,35 @@ def self.veteran(object) # If :issue_count is present then we're hitting this serializer from a Decision Review # queue table, and we do not need to gather request issues as they are not used there. skip_acquiring_request_issues = object[:issue_count] - { id: decision_review(object).external_id, + uuid: decision_review(object).uuid, isLegacyAppeal: false, issueCount: issue_count(object), - activeRequestIssues: skip_acquiring_request_issues || request_issues(object).active.map(&:serialize) + activeRequestIssues: skip_acquiring_request_issues || request_issues(object).active.map(&:serialize), + appellant_type: decision_review(object).claimant&.type } end + attribute :power_of_attorney do |object| + if power_of_attorney(object).nil? + nil + else + { + representative_type: power_of_attorney(object)&.representative_type, + representative_name: power_of_attorney(object)&.representative_name, + representative_address: power_of_attorney(object)&.representative_address, + representative_email_address: power_of_attorney(object)&.representative_email_address, + poa_last_synced_at: power_of_attorney(object)&.poa_last_synced_at, + representative_tz: representative_tz(object) + } + end + end + + attribute :appellant_type do |object| + decision_review(object).claimant&.type + end + attribute :issue_count do |object| issue_count(object) end @@ -97,6 +129,12 @@ def self.veteran(object) decision_review(object).is_a?(Appeal) ? 
"Board Grant" : decision_review(object).class.review_title end + attribute :external_appeal_id do |object| + object[:external_appeal_id] || decision_review(object).uuid + end + + attribute :appeal_type + attribute :business_line do |object| assignee = object.assigned_to diff --git a/app/models/special_issues_comparator.rb b/app/models/special_issues_comparator.rb new file mode 100644 index 00000000000..d73d53c9b34 --- /dev/null +++ b/app/models/special_issues_comparator.rb @@ -0,0 +1,226 @@ +# frozen_string_literal: true + +# class to get special issues from ratings +# built for MST/PACT release + +class SpecialIssuesComparator + attr_accessor :issue, :rating_special_issues, :bgs_client, :veteran_contentions, :linked_contentions + def initialize(issue) + @issue = issue + @rating_special_issues = issue&.special_issues + @bgs_client = BGSService.new + end + + MST_SPECIAL_ISSUES = ["sexual assault trauma", "sexual trauma/assault", "sexual harassment"].freeze + PACT_SPECIAL_ISSUES = [ + "agent orange - outside vietnam or unknown", + "agent orange - vietnam", + "amyotrophic lateral sclerosis (als)", + "burn pit exposure", + "environmental hazard in gulf war", + "gulf war presumptive", + "radiation" + ].freeze + CONTENTION_PACT_ISSUES = %w[ + pact + pactdicre + pees1 + ].freeze + CONTENTION_MST_ISSUES = [ + "mst" + ].freeze + + # returns a hash with mst_available and pact_available values + # values generated from ratings special issues and contentions + def special_issues + # guard for MST/PACT feature toggle + # commented out for testing + # return [] unless FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user]) || + # FeatureToggle.enabled?(:pact_identification, user: RequestStore[:current_user]) + + [{ + mst_available: mst_from_rating_or_contention, + pact_available: pact_from_rating_or_contention + }] + end + + # check rating for existing mst status; if none, search contentions + def mst_from_rating_or_contention + return true if mst_from_rating? + return true if mst_from_contention? + + false + end + + # check rating for existing pact status; if none, search contentions + def pact_from_rating_or_contention + return true if pact_from_rating? + return true if pact_from_contention? + + false + end + + # cycles rating special issues and returns if a special issue is MST + def mst_from_rating? + return false if rating_special_issues.blank? + + rating_special_issues.each do |special_issue| + return true if special_issue_has_mst?(special_issue) + end + + false + end + + # cycles rating special issues and returns if a special issue is PACT + def pact_from_rating? + return false if rating_special_issues.blank? + + rating_special_issues.each do |special_issue| + return true if special_issue_has_pact?(special_issue) + end + + false + end + + # :reek:UtilityFunction + # checks if rating special issue meets MST criteria + def special_issue_has_mst?(special_issue) + special_issue.transform_keys!(&:to_s) + if special_issue["spis_tn"]&.casecmp("ptsd - personal trauma")&.zero? + return MST_SPECIAL_ISSUES.include?(special_issue["spis_basis_tn"]&.downcase) + end + + if special_issue["spis_tn"]&.casecmp("non-ptsd personal trauma")&.zero? + MST_SPECIAL_ISSUES.include?(special_issue["spis_basis_tn"]&.downcase) + end + end + + # :reek:UtilityFunction + # checks if rating special issue meets PACT criteria + def special_issue_has_pact?(special_issue) + special_issue.transform_keys!(&:to_s) + if special_issue["spis_tn"]&.casecmp("gulf war presumptive 3.320")&.zero? 
+      return special_issue["spis_basis_tn"]&.casecmp("particulate matter")&.zero?
+    end
+
+    PACT_SPECIAL_ISSUES.include?(special_issue["spis_tn"]&.downcase)
+  end
+
+  # cycle contentions tied to the rating issue/decision and return true if there is a match for mst
+  def mst_from_contention?
+    self.linked_contentions ||= contentions_tied_to_issue
+    return false if linked_contentions.blank?
+
+    linked_contentions.each do |contention|
+      return true if mst_contention_status?(contention)
+    end
+
+    false
+  end
+
+  # cycle contentions tied to the rating issue/decision and return true if there is a match for pact
+  def pact_from_contention?
+    self.linked_contentions ||= contentions_tied_to_issue
+    return false if linked_contentions.blank?
+
+    linked_contentions.each do |contention|
+      return true if pact_contention_status?(contention)
+    end
+
+    false
+  end
+
+  # checks single contention special issue status for MST
+  # :reek:UtilityFunction
+  def mst_contention_status?(bgs_contention)
+    return false if bgs_contention.nil?
+
+    bgs_contention.transform_keys!(&:to_s)
+    return false if bgs_contention["special_issues"].blank?
+
+    if bgs_contention["special_issues"].is_a?(Hash)
+      CONTENTION_MST_ISSUES.include?(bgs_contention["special_issues"][:spis_tc]&.downcase)
+    elsif bgs_contention["special_issues"].is_a?(Array)
+      bgs_contention["special_issues"].any? { |issue| CONTENTION_MST_ISSUES.include?(issue[:spis_tc]&.downcase) }
+    end
+  end
+
+  # checks single contention special issue status for PACT
+  # :reek:UtilityFunction
+  def pact_contention_status?(bgs_contention)
+    return false if bgs_contention.nil?
+
+    bgs_contention.transform_keys!(&:to_s)
+    return false if bgs_contention["special_issues"].blank?
+
+    if bgs_contention["special_issues"].is_a?(Hash)
+      CONTENTION_PACT_ISSUES.include?(bgs_contention["special_issues"][:spis_tc]&.downcase)
+    elsif bgs_contention["special_issues"].is_a?(Array)
+      bgs_contention["special_issues"].any? { |issue| CONTENTION_PACT_ISSUES.include?(issue[:spis_tc]&.downcase) }
+    end
+  end
+
+  # get the contentions for the veteran, find the contentions that are tied to the rating issue
+  def contentions_tied_to_issue
+    # establish veteran contentions
+    self.veteran_contentions ||= fetch_contentions_by_participant_id(issue.participant_id)
+
+    return nil if veteran_contentions.blank?
+
+    match_ratings_with_contentions
+  end
+
+  def fetch_contentions_by_participant_id(participant_id)
+    bgs_client.find_contentions_by_participant_id(participant_id)
+  end
+
+  # cycles list of rba_contentions on the rating issue and matches them with
+  # contentions tied to the veteran
+  def match_ratings_with_contentions
+    contention_matches = []
+
+    return [] if issue.rba_contentions_data.blank?
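+    # rba_contentions_data carries the contention ids (cntntn_id) recorded against this rating issue;
+    # they are matched below against the veteran-level contentions pulled from BGS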
+ + # cycle contentions tied to rating issue + issue.rba_contentions_data.each do |rba| + # grab contention on the rating + rba_contention = rba.with_indifferent_access + # cycle through the list of contentions from the BGS call (all contentions tied to veteran) + veteran_contentions.each do |contention| + next unless contention.is_a?(Hash) + + # store any matches that are found + link_contention_to_rating(contention, rba_contention, contention_matches) + end + end + contention_matches&.compact + end + + # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity + # :reek:UtilityFunction + + # takes the contention given and tries to match it to the current rating issue (issue) + def link_contention_to_rating(contention, rba_contention, contention_matches) + # if only one contention, check the contention info + if contention.dig(:contentions).is_a?(Hash) + # get the single contention from the response + single_contention_info = contention.dig(:contentions) + + return if single_contention_info.blank? + + # see if the contention ties to the rating. if it does, add it to the matches list + if single_contention_info.dig(:cntntn_id) == rba_contention.dig(:cntntn_id) + contention_matches << single_contention_info + end + + # if the response contains an array of contentions, unpack each one and compare + elsif contention.dig(:contentions).is_a?(Array) + + # cycle the contentions within the array to make the comparison to the rba_contention + contention.dig(:contentions).each do |contention_info| + next if contention_info.dig(:cntntn_id).blank? + + # see if the contention ties to the rating. if it does, add it to the matches list + contention_matches << contention_info if contention_info.dig(:cntntn_id) == rba_contention.dig(:cntntn_id) + end + end + contention_matches + end + # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity +end diff --git a/app/models/tasks/assign_hearing_disposition_task.rb b/app/models/tasks/assign_hearing_disposition_task.rb index 6eb94760b9f..a8d80b064cc 100644 --- a/app/models/tasks/assign_hearing_disposition_task.rb +++ b/app/models/tasks/assign_hearing_disposition_task.rb @@ -107,7 +107,13 @@ def postpone! fail HearingDispositionNotPostponed end - schedule_later + multi_transaction do + created_tasks = schedule_later + + cancel_redundant_hearing_postponement_req_tasks + + created_tasks + end end def no_show! 
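+  # `cancel_redundant_hearing_postponement_req_tasks` is not shown here; a plausible sketch, assuming it
+  # simply forwards to HearingPostponementRequestMailTask#cancel_when_redundant for each open postponement
+  # request on the appeal (the method body below is illustrative, not part of this change):
+  #
+  #   def cancel_redundant_hearing_postponement_req_tasks
+  #     appeal.tasks.of_type(HearingPostponementRequestMailTask.name).open.each do |mail_task|
+  #       mail_task.cancel_when_redundant(self, updated_at)
+  #     end
+  #   end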
@@ -240,10 +246,14 @@ def mark_hearing_with_disposition(payload_values:, instructions: nil) end clean_up_virtual_hearing - reschedule_or_schedule_later( + created_tasks = reschedule_or_schedule_later( instructions: instructions, after_disposition_update: payload_values[:after_disposition_update] ) + + cancel_redundant_hearing_postponement_req_tasks + + created_tasks end end diff --git a/app/models/tasks/establishment_task.rb b/app/models/tasks/establishment_task.rb index d576f9fc603..8f1f61798e4 100644 --- a/app/models/tasks/establishment_task.rb +++ b/app/models/tasks/establishment_task.rb @@ -7,6 +7,8 @@ def label "Establishment Task" end + # :reek:FeatureEnvy + # :reek:DuplicateMethodCall { max_calls: 2 } def format_instructions(request_issues) # format the instructions by loading an array and adding it to the instructions added_issue_format = [] @@ -23,7 +25,12 @@ def format_instructions(request_issues) end special_issue_status = format_special_issues_text(issue.mst_status, issue.pact_status).to_s - added_issue_format << [format_description_text(issue), issue.benefit_type.capitalize, original_special_issue_status, special_issue_status] + added_issue_format << [ + format_description_text(issue), + issue.benefit_type.capitalize, + original_special_issue_status, + special_issue_status + ] # create record to log the special issues changes create_special_issue_changes_record(issue) @@ -45,16 +52,20 @@ def format_description_text(issue) end end + # rubocop:disable Metrics/CyclomaticComplexity def format_special_issues_text(mst_status, pact_status) + # same method as issues_update_task # format the special issues comment to display the change in the special issues status(es) - s = "Special issues:" + special_issue_phrase = "Special Issues:" - return s + " None" if !mst_status && !pact_status - return s + " MST, PACT" if mst_status && pact_status - return s + " MST" if mst_status - return s + " PACT" if pact_status + return special_issue_phrase + " None" if !mst_status && !pact_status + return special_issue_phrase + " MST, PACT" if mst_status && pact_status + return special_issue_phrase + " MST" if mst_status + return special_issue_phrase + " PACT" if pact_status end + # rubocop:enable Metrics/CyclomaticComplexity + # :reek:FeatureEnvy def create_special_issue_changes_record(issue) # create SpecialIssueChange record to log the changes SpecialIssueChange.create!( diff --git a/app/models/tasks/hearing_mail_tasks/hearing_postponement_request_mail_task.rb b/app/models/tasks/hearing_mail_tasks/hearing_postponement_request_mail_task.rb new file mode 100644 index 00000000000..878faccbd96 --- /dev/null +++ b/app/models/tasks/hearing_mail_tasks/hearing_postponement_request_mail_task.rb @@ -0,0 +1,334 @@ +# frozen_string_literal: true + +## +# Task to process a hearing postponement request received via the mail +# +# When this task is created: +# - It's parent task is set as the RootTask of the associated appeal +# - The task is assigned to the MailTeam to track where the request originated +# - A child task of the same name is created and assigned to the HearingAdmin organization +## +class HearingPostponementRequestMailTask < HearingRequestMailTask + prepend HearingPostponed + include RunAsyncable + + class << self + def label + COPY::HEARING_POSTPONEMENT_REQUEST_MAIL_TASK_LABEL + end + + def allow_creation?(*) + true + end + end + + TASK_ACTIONS = [ + Constants.TASK_ACTIONS.CHANGE_TASK_TYPE.to_h, + Constants.TASK_ACTIONS.COMPLETE_AND_POSTPONE.to_h, + Constants.TASK_ACTIONS.ASSIGN_TO_TEAM.to_h, + 
Constants.TASK_ACTIONS.ASSIGN_TO_PERSON.to_h, + Constants.TASK_ACTIONS.CANCEL_TASK.to_h + ].freeze + + # Purpose: Determines the actions a user can take depending on their permissions and the state of the appeal + # Params: user - The current user object + # Return: The task actions array of objects + def available_actions(user) + return [] unless user.in_hearing_admin_team? + + if active_schedule_hearing_task || hearing_scheduled_and_awaiting_disposition? + TASK_ACTIONS + else + [ + Constants.TASK_ACTIONS.CHANGE_TASK_TYPE.to_h, + Constants.TASK_ACTIONS.CANCEL_TASK.to_h + ] + end + end + + # Purpose: Updates the current state of the appeal + # Params: params - The update params object + # user - The current user object + # Return: The current hpr task and newly created tasks + def update_from_params(params, user) + payload_values = params.delete(:business_payloads)&.dig(:values) || params + + # If the request is to mark HPR mail task complete + if payload_values[:granted]&.to_s.present? + # If request to postpone hearing is granted + if payload_values[:granted] + created_tasks = update_hearing_and_create_tasks(payload_values[:after_disposition_update]) + end + update_self_and_parent_mail_task(user: user, payload_values: payload_values) + + [self] + (created_tasks || []) + else + super(params, user) + end + end + + # Purpose: Only show HPR mail task assigned to "HearingAdmin" on the Case Timeline + # Params: None + # Return: boolean if task is assigned to MailTeam + def hide_from_case_timeline + assigned_to.is_a?(MailTeam) + end + + # Purpose: Determines if there is an open hearing + # Params: None + # Return: The hearing if one exists + def open_hearing + @open_hearing ||= open_assign_hearing_disposition_task&.hearing + end + + # Purpose: Gives the latest hearing task + # Params: None + # Return: The hearing task + def hearing_task + @hearing_task ||= open_hearing&.hearing_task || active_schedule_hearing_task.parent + end + + # Purpose: When a hearing is postponed through the completion of a NoShowHearingTask, AssignHearingDispositionTask, + # or ChangeHearingDispositionTask, cancel any open HearingPostponementRequestMailTasks in that appeal's + # task tree, as the HPR mail tasks have become redundant. 
+ # + # Params: completed_task - task object of the completed task through which the hearing was postponed + # updated_at - datetime when the task was completed + # + # Return: The cancelled HPR mail tasks + def cancel_when_redundant(completed_task, updated_at) + user = ensure_user_can_cancel_task(completed_task) + params = { + status: Constants.TASK_STATUSES.cancelled, + instructions: format_cancellation_reason(completed_task.type, updated_at) + } + update_from_params(params, user) + end + + private + + # Purpose: Gives the latest active hearing task + # Params: None + # Return: The latest active hearing task + def active_schedule_hearing_task + appeal.tasks.of_type(ScheduleHearingTask.name).active.first + end + + # ChangeHearingDispositionTask is a subclass of AssignHearingDispositionTask + ASSIGN_HEARING_DISPOSITION_TASKS = [ + AssignHearingDispositionTask.name, + ChangeHearingDispositionTask.name + ].freeze + + # Purpose: Gives the latest active assign hearing disposition task + # Params: None + # Return: The latest active assign hearing disposition task + def open_assign_hearing_disposition_task + @open_assign_hearing_disposition_task ||= appeal.tasks.of_type(ASSIGN_HEARING_DISPOSITION_TASKS).open&.first + end + + # Purpose: Associated appeal has an upcoming hearing with an open status + # Params: None + # Return: Returns a boolean if the appeal has an upcoming hearing + def hearing_scheduled_and_awaiting_disposition? + return false unless open_hearing + + # Ensure associated hearing is not scheduled for the past + !open_hearing.scheduled_for_past? + end + + # Purpose: Sets the previous hearing's disposition to postponed + # Params: None + # Return: Returns a boolean for if the hearing has been updated + def postpone_previous_hearing + update_hearing(disposition: Constants.HEARING_DISPOSITION_TYPES.postponed) + end + + # Purpose: Wrapper for updating hearing and creating new hearing tasks + # Params: Params object for additional tasks or updates after updating the hearing + # Return: Returns the newly created tasks + def update_hearing_and_create_tasks(after_disposition_update) + multi_transaction do + # If hearing exists, postpone previous hearing and handle conference links + + if open_hearing + postpone_previous_hearing + clean_up_virtual_hearing + end + # Schedule hearing or create new ScheduleHearingTask depending on after disposition action + reschedule_or_schedule_later(after_disposition_update) + end + end + + # Purpose: Sets the previous hearing's disposition + # Params: None + # Return: Returns a boolean for if the hearing has been updated + def update_hearing(hearing_hash) + if open_hearing.is_a?(LegacyHearing) + open_hearing.update_caseflow_and_vacols(hearing_hash) + else + open_hearing.update(hearing_hash) + end + end + + # Purpose: Deletes the old scheduled virtual hearings + # Params: None + # Return: Returns nil + def clean_up_virtual_hearing + if open_hearing.virtual? 
+ perform_later_or_now(VirtualHearings::DeleteConferencesJob) + end + end + + # Purpose: Either reschedule or send to schedule veteran list + # Params: None + # Return: Returns newly created tasks + # :reek:FeatureEnvy + def reschedule_or_schedule_later(after_disposition_update) + case after_disposition_update[:action] + when "reschedule" + new_hearing_attrs = after_disposition_update[:new_hearing_attrs] + reschedule( + scheduled_time_string: new_hearing_attrs[:scheduled_time_string], + hearing_day_id: new_hearing_attrs[:hearing_day_id], + hearing_location: new_hearing_attrs[:hearing_location], + virtual_hearing_attributes: new_hearing_attrs[:virtual_hearing_attributes], + notes: new_hearing_attrs[:notes], + email_recipients_attributes: new_hearing_attrs[:email_recipients] + ) + when "schedule_later" + schedule_later + else + fail ArgumentError, "unknown disposition action" + end + end + + # rubocop:disable Metrics/ParameterLists + # Purpose: Reschedules the hearings + # Params: hearing_day_id - The ID of the hearing day that its going to be scheduled + # scheduled_time_string - The string for the scheduled time + # hearing_location - The hearing location string + # virtual_hearing_attributes - object for virtual hearing attributes + # notes - additional notes for the hearing string + # email_recipients_attributes - the object for the email recipients + # Return: Returns new hearing and assign disposition task + # :reek:LongParameterList + def reschedule( + hearing_day_id:, + scheduled_time_string:, + hearing_location: nil, + virtual_hearing_attributes: nil, + notes: nil, + email_recipients_attributes: nil + ) + multi_transaction do + new_hearing_task = hearing_task.cancel_and_recreate + + new_hearing = HearingRepository.slot_new_hearing(hearing_day_id: hearing_day_id, + appeal: appeal, + hearing_location_attrs: hearing_location&.to_hash, + scheduled_time_string: scheduled_time_string, + notes: notes) + if virtual_hearing_attributes.present? + @alerts = VirtualHearings::ConvertToVirtualHearingService + .convert_hearing_to_virtual(new_hearing, virtual_hearing_attributes) + elsif email_recipients_attributes.present? + create_or_update_email_recipients(new_hearing, email_recipients_attributes) + end + + disposition_task = AssignHearingDispositionTask + .create_assign_hearing_disposition_task!(appeal, new_hearing_task, new_hearing) + + AppellantNotification.notify_appellant(appeal, "Hearing scheduled") + + [new_hearing_task, disposition_task] + end + end + # rubocop:enable Metrics/ParameterLists + + # Purpose: Sends the appeal back to the scheduling list + # Params: None + # Return: Returns the new hearing task and schedule task + def schedule_later + new_hearing_task = hearing_task.cancel_and_recreate + schedule_task = ScheduleHearingTask.create!(appeal: appeal, parent: new_hearing_task) + + [new_hearing_task, schedule_task].compact + end + + # Purpose: Completes the Mail task assigned to the MailTeam and the one for HearingAdmin + # Params: user - The current user object + # payload_values - The attributes needed for the update + # Return: Boolean for if the tasks have been updated + def update_self_and_parent_mail_task(user:, payload_values:) + # Append instructions/context provided by HearingAdmin to original details from MailTeam + updated_instructions = format_instructions_on_completion( + admin_context: payload_values[:instructions], + ruling: payload_values[:granted] ? 
"GRANTED" : "DENIED", + date_of_ruling: payload_values[:date_of_ruling] + ) + + # Complete HPR mail task assigned to HearingAdmin + update!( + completed_by: user, + status: Constants.TASK_STATUSES.completed, + instructions: updated_instructions + ) + # Complete parent HPR mail task assigned to MailTeam + update_parent_status + end + + # Purpose: Appends instructions on to the instructions provided in the mail task + # Params: admin_context - String for instructions + # ruling - string for granted or denied + # date_of_ruling - string for the date of ruling + # Return: instructions string + def format_instructions_on_completion(admin_context:, ruling:, date_of_ruling:) + formatted_date = date_of_ruling.to_date&.strftime("%m/%d/%Y") + + markdown_to_append = <<~EOS + + *** + + ###### Marked as complete: + + **DECISION** + Motion to postpone #{ruling} + + **DATE OF RULING** + #{formatted_date} + + **DETAILS** + #{admin_context} + EOS + + [instructions[0] + markdown_to_append] + end + + # Purpose: If hearing postponed by a member of HearingAdminTeam, return that user. Otherwise, in the + # case that hearing in postponed by HearingChangeDispositionJob, current_user is system_user + # and will not have permission to call Task#update_from_params. Instead, return a user with + # with HearingAdmin privileges. + # + # Params: completed_task - Task object of task through which heairng was postponed + def ensure_user_can_cancel_task(completed_task) + current_user = RequestStore[:current_user] + + return current_user if current_user&.in_hearing_admin_team? + + completed_task.hearing.updated_by + end + + # Purpose: Format context to be appended to HPR mail tasks instructions upon task cancellation + # + # Params: task_name - string of name of completed task through which hearing was postponed + # updated_at - datetime when the task was completed + # + # Return: String to be submitted in instructions field of task + def format_cancellation_reason(task_name, updated_at) + formatted_date = updated_at.strftime("%m/%d/%Y") + + "##### REASON FOR CANCELLATION:\n" \ + "Hearing postponed when #{task_name} was completed on #{formatted_date}" + end +end diff --git a/app/models/tasks/hearing_mail_tasks/hearing_request_mail_task.rb b/app/models/tasks/hearing_mail_tasks/hearing_request_mail_task.rb new file mode 100644 index 00000000000..40f0ee7503a --- /dev/null +++ b/app/models/tasks/hearing_mail_tasks/hearing_request_mail_task.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +## +# Task to serve as interface with shared methods for the following hearings mail tasks: +# - HearingPostponementRequestMailTask +# - HearingWithdrawalRequestMailTask +# HearingRequestMailTask is itself not an assignable task type +## +class HearingRequestMailTask < MailTask + include RunAsyncable + validates :parent, presence: true, on: :create + + before_validation :verify_request_type_designated + + class HearingAssociationMissing < StandardError + def initialize + super(format(COPY::HEARING_TASK_ASSOCIATION_MISSING_MESSAGE, hearing_task_id)) + end + end + + class << self + def allow_creation?(*) + false + end + + # All descendant postponement/withdrawal tasks will initially be assigned to the Hearing Admin org + def default_assignee(_task) + HearingAdmin.singleton + end + end + + def available_actions(_user) + [] + end + + private + + # Ensure create is called on a descendant mail task and not directly on the HearingRequestMailTask class + def verify_request_type_designated + if self.class == HearingRequestMailTask + fail 
Caseflow::Error::InvalidTaskTypeOnTaskCreate, task_type: type + end + end +end diff --git a/app/models/tasks/issues_update_task.rb b/app/models/tasks/issues_update_task.rb index 7a9aa53ba65..5239d9294e1 100644 --- a/app/models/tasks/issues_update_task.rb +++ b/app/models/tasks/issues_update_task.rb @@ -7,20 +7,20 @@ def label "Issues Update Task" end - def format_instructions(change_type, issue_category, benefit_type, original_mst, original_pact, edit_mst = nil, edit_pact = nil, - _mst_edit_reason = nil, _pact_edit_reason = nil) + # :reek:FeatureEnvy + def format_instructions(set) # format the instructions by loading an array and adding it to the instructions edit_issue_format = [] # add the change type - edit_issue_format << change_type - edit_issue_format << benefit_type - edit_issue_format << issue_category - original_comment = format_special_issues_text(original_mst, original_pact).to_s + edit_issue_format << set.change_type + edit_issue_format << set.benefit_type + edit_issue_format << set.issue_category + original_comment = format_special_issues_text(set.original_mst, set.original_pact).to_s edit_issue_format << original_comment # format edit if edit values are given - unless edit_mst.nil? || edit_pact.nil? - updated_comment = format_special_issues_text(edit_mst, edit_pact).to_s + unless set.edit_mst.nil? || set.edit_pact.nil? + updated_comment = format_special_issues_text(set.edit_mst, set.edit_pact).to_s edit_issue_format << updated_comment end @@ -36,13 +36,15 @@ def format_instructions(change_type, issue_category, benefit_type, original_mst, private + # rubocop:disable Metrics/CyclomaticComplexity def format_special_issues_text(mst_status, pact_status) # format the special issues comment to display the change in the special issues status(es) - s = "Special Issues:" + special_issue_status = "Special Issues:" - return s + " None" if !mst_status && !pact_status - return s + " MST, PACT" if mst_status && pact_status - return s + " MST" if mst_status - return s + " PACT" if pact_status + return special_issue_status + " None" if !mst_status && !pact_status + return special_issue_status + " MST, PACT" if mst_status && pact_status + return special_issue_status + " MST" if mst_status + return special_issue_status + " PACT" if pact_status end + # rubocop:enable Metrics/CyclomaticComplexity end diff --git a/app/models/tasks/judge_dispatch_return_task.rb b/app/models/tasks/judge_dispatch_return_task.rb index ad3ada10d61..fe0a6209656 100644 --- a/app/models/tasks/judge_dispatch_return_task.rb +++ b/app/models/tasks/judge_dispatch_return_task.rb @@ -16,6 +16,7 @@ def self.label COPY::JUDGE_DISPATCH_RETURN_TASK_LABEL end + # :reek:UtilityFunction def ama_issue_checkout # bypass special issues page if mst/pact enabled return Constants.TASK_ACTIONS.JUDGE_AMA_CHECKOUT.to_h if diff --git a/app/models/tasks/mail_task.rb b/app/models/tasks/mail_task.rb index 9e68708bd59..c3709f2bda4 100644 --- a/app/models/tasks/mail_task.rb +++ b/app/models/tasks/mail_task.rb @@ -10,6 +10,8 @@ # - withdrawing an appeal # - switching dockets # - add post-decision motions +# - postponing a hearing +# - withdrawing a hearing # Adding a mail task to an appeal is done by mail team members and will create a task assigned to the mail team. It # will also automatically create a child task assigned to the team the task should be routed to. 
@@ -18,15 +20,21 @@ class MailTask < Task def verify_org_task_unique; end prepend PrivacyActPending + # This constant is more efficient than iterating through all mail tasks + # and filtering out almost all of them since only HPR and HWR are approved for now + LEGACY_MAIL_TASKS = [ + { label: "Hearing postponement request", value: "HearingPostponementRequestMailTask" } + ].freeze + class << self def blocking? # Some open mail tasks should block distribution of an appeal to judges. - # Define this method in subclasses for blocking task types. + # Define this method in descendants for blocking task types. false end - def subclass_routing_options(user: nil, appeal: nil) - filtered = MailTask.subclasses.select { |sc| sc.allow_creation?(user: user, appeal: appeal) } + def descendant_routing_options(user: nil, appeal: nil) + filtered = MailTask.descendants.select { |sc| sc.allow_creation?(user: user, appeal: appeal) } sorted = filtered.sort_by(&:label).map { |subclass| { value: subclass.name, label: subclass.label } } sorted end diff --git a/app/models/tasks/no_show_hearing_task.rb b/app/models/tasks/no_show_hearing_task.rb index 030db306314..ef7b3e8af1d 100644 --- a/app/models/tasks/no_show_hearing_task.rb +++ b/app/models/tasks/no_show_hearing_task.rb @@ -18,6 +18,8 @@ class NoShowHearingTask < Task before_validation :set_assignee + delegate :hearing, to: :parent, allow_nil: true + DAYS_ON_HOLD = 15 def self.create_with_hold(parent_task) @@ -61,6 +63,8 @@ def reschedule_hearing ScheduleHearingTask.create!(appeal: appeal, parent: ancestor_task_of_type(HearingTask)&.parent) update!(status: Constants.TASK_STATUSES.completed) + + cancel_redundant_hearing_postponement_req_tasks end end diff --git a/app/models/tasks/root_task.rb b/app/models/tasks/root_task.rb index 4a06df49d8a..5d46ed2e1d3 100644 --- a/app/models/tasks/root_task.rb +++ b/app/models/tasks/root_task.rb @@ -76,8 +76,9 @@ def hide_from_task_snapshot true end + # :reek:UtilityFunction def available_actions(user) - return [Constants.TASK_ACTIONS.CREATE_MAIL_TASK.to_h] if RootTask.user_can_create_mail_task?(user) && ama? + return [Constants.TASK_ACTIONS.CREATE_MAIL_TASK.to_h] if RootTask.user_can_create_mail_task?(user) [] end diff --git a/app/models/user.rb b/app/models/user.rb index aa36efb6e0e..b7c780cab79 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -275,7 +275,7 @@ def camo_employee? end def vha_employee? - member_of_organization?(BusinessLine.find_by(url: "vha")) + member_of_organization?(VhaBusinessLine.singleton) end def organization_queue_user? @@ -650,6 +650,7 @@ def prod_system_user end alias preprod_system_user prod_system_user + alias prodtest_system_user prod_system_user def uat_system_user find_or_initialize_by(station_id: "317", css_id: "CASEFLOW1") diff --git a/app/models/vacols/case_hearing.rb b/app/models/vacols/case_hearing.rb index b51066b55da..a0770103220 100644 --- a/app/models/vacols/case_hearing.rb +++ b/app/models/vacols/case_hearing.rb @@ -3,12 +3,20 @@ class VACOLS::CaseHearing < VACOLS::Record self.table_name = "hearsched" self.primary_key = "hearing_pkseq" + self.sequence_name = "hearsched_pkseq" - # :autogenerated allows a trigger to set the new sequence value for a primary key. 
+ # @note The sequence for primary key `hearing_pkseq` is generated by a trigger defined on the VACOLS table + # @see https://github.com/department-of-veterans-affairs/VACOLS/blob/0999c2b1e6d7561ab142d35f93c76a291aa7cce6/vacols_schema.sql#L5058-L5073 # - # COMPATIBILITY NOTE: Support for :autogenerated is dropped in Rails 6. - # See Issue: https://github.com/rsim/oracle-enhanced/issues/1643 - self.sequence_name = :autogenerated + # @note Support for trigger-based primary keys is removed from `activerecord-oracle_enhanced-adapter` starting v6.0.Z: + # @see https://github.com/rsim/oracle-enhanced/blob/81fbebec11f15fbc9724c6b36e98151fbd374b75/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb#L452 + # + # @note This is a workaround, which overrides `ActiveRecord::ModelSchema::ClassMethods #next_sequence_value` to always + # return `nil`, as it does on `activerecord-oracle_enhanced-adapter` before v6.0.Z: + # @see https://github.com/rsim/oracle-enhanced/blob/95065589d4d49ea24eb79afc2883fc6180d97e67/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb#L422 + def self.next_sequence_value + nil + end attribute :hearing_date, :datetime attribute :notes1, :ascii_string diff --git a/app/models/vha_membership_request_mail_builder.rb b/app/models/vha_membership_request_mail_builder.rb index ba3a916a227..7b29401d227 100644 --- a/app/models/vha_membership_request_mail_builder.rb +++ b/app/models/vha_membership_request_mail_builder.rb @@ -136,12 +136,13 @@ def requestor_vha_pending_organization_request_names end def organization_vha?(organization) - vha_organization_types = [VhaCamo, VhaCaregiverSupport, VhaProgramOffice, VhaRegionalOffice] - organization.url == "vha" || vha_organization_types.any? { |vha_org| organization.is_a?(vha_org) } + vha_organization_types = [VhaBusinessLine, VhaCamo, VhaCaregiverSupport, VhaProgramOffice, VhaRegionalOffice] + vha_organization_types.any? { |vha_org| organization.is_a?(vha_org) } end def belongs_to_vha_org? - requestor.organizations.any? { |org| org.url == "vha" } + # requestor.organizations.any? { |org| org.url == "vha" } + requestor.member_of_organization?(VhaBusinessLine.singleton) end def single_request diff --git a/app/queries/batch_users_for_reader_query.rb b/app/queries/batch_users_for_reader_query.rb index be31351ea41..abadd883cb8 100644 --- a/app/queries/batch_users_for_reader_query.rb +++ b/app/queries/batch_users_for_reader_query.rb @@ -7,7 +7,7 @@ def self.process User.where("(efolder_documents_fetched_at <= ? " \ "OR efolder_documents_fetched_at IS NULL) " \ "AND last_login_at >= ?", 24.hours.ago, 1.week.ago) - .order("efolder_documents_fetched_at IS NULL DESC, efolder_documents_fetched_at ASC") + .order(Arel.sql("efolder_documents_fetched_at IS NULL DESC, efolder_documents_fetched_at ASC")) .limit(DEFAULT_USERS_LIMIT) end end diff --git a/app/repositories/task_action_repository.rb b/app/repositories/task_action_repository.rb index efa50b882db..384898aeb67 100644 --- a/app/repositories/task_action_repository.rb +++ b/app/repositories/task_action_repository.rb @@ -18,8 +18,13 @@ def assign_to_organization_data(task, _user = nil) end def mail_assign_to_organization_data(task, user = nil) - options = MailTask.subclass_routing_options(user: user, appeal: task.appeal) - valid_options = task.appeal.outcoded? ? options : options.reject { |opt| opt[:value] == "VacateMotionMailTask" } + if task.appeal.is_a? 
Appeal + options = MailTask.descendant_routing_options(user: user, appeal: task.appeal) + .reject { |opt| opt[:value] == task.type } + valid_options = task.appeal.outcoded? ? options : options.reject { |opt| opt[:value] == "VacateMotionMailTask" } + elsif task.appeal.is_a? LegacyAppeal + valid_options = MailTask::LEGACY_MAIL_TASKS + end { options: valid_options } end @@ -586,7 +591,7 @@ def docket_appeal_data(task, _user) modal_body: format(COPY::DOCKET_APPEAL_MODAL_BODY, pre_docket_org), modal_button_text: COPY::MODAL_CONFIRM_BUTTON, modal_alert: COPY::DOCKET_APPEAL_MODAL_NOTICE, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, redirect_after: "/organizations/#{BvaIntake.singleton.url}" } end @@ -640,7 +645,7 @@ def vha_assign_to_program_office_data(*) modal_title: COPY::VHA_ASSIGN_TO_PROGRAM_OFFICE_MODAL_TITLE, modal_button_text: COPY::MODAL_ASSIGN_BUTTON, modal_selector_placeholder: COPY::VHA_PROGRAM_OFFICE_SELECTOR_PLACEHOLDER, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, drop_down_label: COPY::VHA_CAMO_ASSIGN_TO_PROGRAM_OFFICE_DROPDOWN_LABEL, type: AssessDocumentationTask.name, redirect_after: "/organizations/#{VhaCamo.singleton.url}" @@ -708,7 +713,7 @@ def bva_intake_return_to_camo(task, _user) modal_title: COPY::BVA_INTAKE_RETURN_TO_CAMO_MODAL_TITLE, modal_body: COPY::BVA_INTAKE_RETURN_TO_CAMO_MODAL_BODY, modal_button_text: COPY::MODAL_RETURN_BUTTON, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, message_title: format(COPY::BVA_INTAKE_RETURN_TO_CAMO_CONFIRMATION_TITLE, task.appeal.veteran_full_name), type: VhaDocumentSearchTask.name, redirect_after: "/organizations/#{queue_url}" @@ -725,7 +730,7 @@ def bva_intake_return_to_caregiver(task, _user) modal_title: COPY::BVA_INTAKE_RETURN_TO_CAREGIVER_MODAL_TITLE, modal_body: COPY::BVA_INTAKE_RETURN_TO_CAREGIVER_MODAL_BODY, modal_button_text: COPY::MODAL_RETURN_BUTTON, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, message_title: format(COPY::BVA_INTAKE_RETURN_TO_CAREGIVER_CONFIRMATION_TITLE, task.appeal.veteran_full_name), type: VhaDocumentSearchTask.name, redirect_after: "/organizations/#{queue_url}" @@ -742,7 +747,7 @@ def bva_intake_return_to_emo(task, _user) modal_title: COPY::BVA_INTAKE_RETURN_TO_EMO_MODAL_TITLE, modal_body: COPY::BVA_INTAKE_RETURN_TO_EMO_MODAL_BODY, modal_button_text: COPY::MODAL_RETURN_BUTTON, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, message_title: format(COPY::BVA_INTAKE_RETURN_TO_EMO_CONFIRMATION_TITLE, task.appeal.veteran_full_name), type: EducationDocumentSearchTask.name, redirect_after: "/organizations/#{queue_url}" @@ -766,7 +771,7 @@ def emo_return_to_board_intake(*) { modal_title: COPY::EMO_RETURN_TO_BOARD_INTAKE_MODAL_TITLE, modal_button_text: COPY::MODAL_RETURN_BUTTON, - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, type: EducationDocumentSearchTask.name, redirect_after: "/organizations/#{EducationEmo.singleton.url}" } @@ -778,7 +783,7 @@ def emo_assign_to_education_rpo_data(*) modal_title: COPY::EMO_ASSIGN_TO_RPO_MODAL_TITLE, modal_button_text: COPY::MODAL_ASSIGN_BUTTON, modal_selector_placeholder: COPY::EDUCATION_RPO_SELECTOR_PLACEHOLDER, - instructions_label: 
COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, drop_down_label: COPY::EMO_ASSIGN_TO_RPO_MODAL_BODY, type: EducationAssessDocumentationTask.name, redirect_after: "/organizations/#{EducationEmo.singleton.url}", @@ -795,7 +800,7 @@ def education_rpo_return_to_emo(task, _user) COPY::EDUCATION_RPO_RETURN_TO_EMO_CONFIRMATION, task.appeal.veteran_full_name ), - instructions_label: COPY::PRE_DOCKET_MODAL_BODY, + instructions_label: COPY::PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL, type: EducationAssessDocumentationTask.name, redirect_after: "/organizations/#{queue_url}", modal_button_text: COPY::MODAL_RETURN_BUTTON diff --git a/app/serializers/intake/legacy_appeal_serializer.rb b/app/serializers/intake/legacy_appeal_serializer.rb index 24d0e361765..d5dda33ca95 100644 --- a/app/serializers/intake/legacy_appeal_serializer.rb +++ b/app/serializers/intake/legacy_appeal_serializer.rb @@ -8,27 +8,25 @@ class Intake::LegacyAppealSerializer attribute :claimant_type do |object| object.claimant[:representative][:type] end - attribute :claimant_name do |object| - object.veteran_full_name - end + attribute :claimant_name, &:veteran_full_name attribute :veteran_is_not_claimant attribute :request_issues, &:issues attribute :intake_user - attribute :processed_in_caseflow do |object| + attribute :processed_in_caseflow do |_object| true end - attribute :legacy_opt_in_approved do |object| + attribute :legacy_opt_in_approved do |_object| true end - attribute :legacy_appeals do |object| + attribute :legacy_appeals do |_object| [] end - attribute :ratings do |object| + attribute :ratings do |_object| [] end @@ -36,27 +34,27 @@ class Intake::LegacyAppealSerializer "/appeals/#{object.id}/edit" end - attribute :processed_at do |object| + attribute :processed_at do |_object| nil end - attribute :veteran_invalid_fields do |object| + attribute :veteran_invalid_fields do |_object| nil end - attribute :active_nonrating_request_issues do |object| + attribute :active_nonrating_request_issues do |_object| [] end - attribute :contestable_issues_by_date do |object| + attribute :contestable_issues_by_date do |_object| [] end - attribute :intake_user do |object| + attribute :intake_user do |_object| nil end - attribute :receipt_date do |object| + attribute :receipt_date do |_object| nil end @@ -81,11 +79,11 @@ class Intake::LegacyAppealSerializer } end - attribute :power_of_attorney_name do |object| + attribute :power_of_attorney_name do |_object| nil end - attribute :claimant_relationship do |object| + attribute :claimant_relationship do |_object| nil end diff --git a/app/serializers/intake/request_issue_serializer.rb b/app/serializers/intake/request_issue_serializer.rb index 8f09f2d3087..263bea4454e 100644 --- a/app/serializers/intake/request_issue_serializer.rb +++ b/app/serializers/intake/request_issue_serializer.rb @@ -8,6 +8,7 @@ class Intake::RequestIssueSerializer attribute :rating_issue_profile_date, &:contested_rating_issue_profile_date attribute :rating_decision_reference_id, &:contested_rating_decision_reference_id attribute :description + attribute :nonrating_issue_description attribute :contention_text attribute :approx_decision_date, &:approx_decision_date_of_issue_being_contested attribute :category, &:nonrating_issue_category diff --git a/app/services/api/v3/decision_reviews/contestable_issue_params.rb b/app/services/api/v3/decision_reviews/contestable_issue_params.rb index ed580c1b888..816a8d2ab35 100644 --- 
a/app/services/api/v3/decision_reviews/contestable_issue_params.rb +++ b/app/services/api/v3/decision_reviews/contestable_issue_params.rb @@ -84,12 +84,6 @@ def intakes_controller_params private def contestable_issue_finder - # log for potential MST/PACT rating/contention BGS call location - DataDogService.increment_counter( - metric_group: "mst_pact_group", - metric_name: "bgs_service.potential_mst_pact_bgs_call_location", - app_name: RequestStore[:application] - ) @contestable_issue_finder ||= Api::V3::DecisionReviews::ContestableIssueFinder.new( { decision_review_class: @decision_review_class, diff --git a/app/services/deprecation_warnings/base_handler.rb b/app/services/deprecation_warnings/base_handler.rb new file mode 100644 index 00000000000..65b473bfb38 --- /dev/null +++ b/app/services/deprecation_warnings/base_handler.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# @abstract Subclass and override {.call} to implement a custom DeprecationWarnings handler class. +# @note For use with `ActiveSupport::Deprecation.behavior=`. +module DeprecationWarnings + class BaseHandler + class << self + # Subclasses must respond to `.call` to play nice with `ActiveSupport::Deprecation.behavior=`. + # https://github.com/rails/rails/blob/a4581b53aae93a8dd3205abae0630398cbce9204/activesupport/lib/active_support/deprecation/behaviors.rb#L70-L71 + # :reek:LongParameterList + def call(_message, _callstack, _deprecation_horizon, _gem_name) + fail NotImplementedError + end + + # Subclasses must respond to `.arity` to play nice with `ActiveSupport::Deprecation.behavior=`. + # Must return number of arguments accepted by `.call`. + # https://github.com/rails/rails/blob/a4581b53aae93a8dd3205abae0630398cbce9204/activesupport/lib/active_support/deprecation/behaviors.rb#L101 + def arity + method(:call).arity + end + end + end +end diff --git a/app/services/deprecation_warnings/development_handler.rb b/app/services/deprecation_warnings/development_handler.rb new file mode 100644 index 00000000000..a61d262e5e6 --- /dev/null +++ b/app/services/deprecation_warnings/development_handler.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require_relative "disallowed_deprecations" + +# @note For use with `ActiveSupport::Deprecation.behavior=`. +module DeprecationWarnings + class DevelopmentHandler < BaseHandler + extend DisallowedDeprecations + + class << self + # :reek:LongParameterList + def call(message, _callstack, _deprecation_horizon, _gem_name) + raise_if_disallowed_deprecation!(message) + emit_warning_to_application_logs(message) + end + + private + + def emit_warning_to_application_logs(message) + Rails.logger.warn(message) + end + end + end +end diff --git a/app/services/deprecation_warnings/disallowed_deprecations.rb b/app/services/deprecation_warnings/disallowed_deprecations.rb new file mode 100644 index 00000000000..79f781ea4cc --- /dev/null +++ b/app/services/deprecation_warnings/disallowed_deprecations.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +# @note Temporary solution for disallowed deprecation warnings. 
+# To be replaced by ActiveSupport Disallowed Deprecations after upgrading to Rails 6.1: +# https://rubyonrails.org/2020/12/9/Rails-6-1-0-release#disallowed-deprecation-support +module DisallowedDeprecations + class ::DisallowedDeprecationError < StandardError; end + + # Regular expressions for Rails 6.0 deprecation warnings that we have addressed in the codebase + RAILS_6_0_FIXED_DEPRECATION_WARNING_REGEXES = [ + /Dangerous query method \(method whose arguments are used as raw SQL\) called with non\-attribute argument\(s\)/, + /The success\? predicate is deprecated and will be removed in Rails 6\.0/ + ].freeze + + # Regular expressions for Rails 6.1 deprecation warnings that we have addressed in the codebase + RAILS_6_1_FIXED_DEPRECATION_WARNING_REGEXES = [ + /update_attributes is deprecated and will be removed from Rails 6\.1/ + ].freeze + + # Regular expressions for deprecation warnings that should raise an exception on detection + DISALLOWED_DEPRECATION_WARNING_REGEXES = [ + *RAILS_6_0_FIXED_DEPRECATION_WARNING_REGEXES, + *RAILS_6_1_FIXED_DEPRECATION_WARNING_REGEXES + ].freeze + + # @param message [String] deprecation warning message to be checked against disallow list + def raise_if_disallowed_deprecation!(message) + if DISALLOWED_DEPRECATION_WARNING_REGEXES.any? { |re| re.match?(message) } + fail DisallowedDeprecationError, message + end + end +end diff --git a/app/services/deprecation_warnings/production_handler.rb b/app/services/deprecation_warnings/production_handler.rb new file mode 100644 index 00000000000..70a4b7fb44b --- /dev/null +++ b/app/services/deprecation_warnings/production_handler.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +# @note For use with `ActiveSupport::Deprecation.behavior=`. +module DeprecationWarnings + class ProductionHandler < BaseHandler + APP_NAME = "caseflow" + SLACK_ALERT_CHANNEL = "#appeals-deprecation-alerts" + + class << self + # :reek:LongParameterList + def call(message, callstack, deprecation_horizon, gem_name) + emit_warning_to_application_logs(message) + emit_warning_to_sentry(message, callstack, deprecation_horizon, gem_name) + emit_warning_to_slack_alerts_channel(message) + rescue StandardError => error + Raven.capture_exception(error) + end + + private + + def emit_warning_to_application_logs(message) + Rails.logger.warn(message) + end + + # :reek:LongParameterList + def emit_warning_to_sentry(message, callstack, deprecation_horizon, gem_name) + # Pre-emptive bugfix for future versions of the `sentry-raven` gem: + # Need to convert callstack elements from `Thread::Backtrace::Location` objects to `Strings` + # to avoid a `TypeError` on `options.deep_dup` in `Raven.capture_message`: + # https://github.com/getsentry/sentry-ruby/blob/2e07e0295ba83df4c76c7bf3315d199c7050a7f9/lib/raven/instance.rb#L114 + callstack_strings = callstack.map(&:to_s) + + Raven.capture_message( + message, + level: "warning", + extra: { + message: message, + gem_name: gem_name, + deprecation_horizon: deprecation_horizon, + callstack: callstack_strings, + environment: Rails.env + } + ) + end + + def emit_warning_to_slack_alerts_channel(message) + slack_alert_title = "Deprecation Warning - #{APP_NAME} (#{ENV['DEPLOY_ENV']})" + + SlackService + .new(url: ENV["SLACK_DISPATCH_ALERT_URL"]) + .send_notification(message, slack_alert_title, SLACK_ALERT_CHANNEL) + end + end + end +end diff --git a/app/services/deprecation_warnings/test_handler.rb b/app/services/deprecation_warnings/test_handler.rb new file mode 100644 index 00000000000..b87945be19c --- /dev/null +++ 
b/app/services/deprecation_warnings/test_handler.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require_relative "disallowed_deprecations" + +# @note For use with `ActiveSupport::Deprecation.behavior=`. +module DeprecationWarnings + class TestHandler < BaseHandler + extend DisallowedDeprecations + + class << self + # :reek:LongParameterList + def call(message, _callstack, _deprecation_horizon, _gem_name) + raise_if_disallowed_deprecation!(message) + emit_error_to_stderr(message) + end + + private + + def emit_error_to_stderr(message) + ActiveSupport::Logger.new($stderr).error(message) + end + end + end +end diff --git a/app/services/external_api/bgs_service.rb b/app/services/external_api/bgs_service.rb index 4170612497d..0b155f89293 100644 --- a/app/services/external_api/bgs_service.rb +++ b/app/services/external_api/bgs_service.rb @@ -3,6 +3,7 @@ require "bgs" # Thin interface to all things BGS +# rubocop:disable Metrics/ClassLength class ExternalApi::BGSService include PowerOfAttorneyMapper include AddressMapper @@ -290,8 +291,7 @@ def can_access?(vbms_id) # persist cache for other objects Rails.cache.write(fetch_veteran_info_cache_key(vbms_id), record, expires_in: 10.minutes) true - rescue BGS::ShareError => error - Raven.capture_exception(error) + rescue BGS::ShareError false end end @@ -303,8 +303,7 @@ def station_conflict?(vbms_id, veteran_participant_id) # sometimes find_flashes works begin client.claimants.find_flashes(vbms_id) - rescue BGS::ShareError => error - Raven.capture_exception(error) + rescue BGS::ShareError return true end @@ -475,8 +474,7 @@ def find_contentions_by_participant_id(participant_id) service: :bgs, name: "contention.find_contention_by_participant_id") do client.contention.find_contention_by_participant_id(participant_id) - rescue BGS::ShareError => error - Raven.capture_exception(error) + rescue BGS::ShareError [] end end @@ -546,3 +544,4 @@ def formatted_start_and_end_dates(start_date, end_date) end # :nocov: end +# rubocop:enable Metrics/ClassLength diff --git a/app/services/metrics_service.rb b/app/services/metrics_service.rb index b2f7ba769c9..aaf8919ed0a 100644 --- a/app/services/metrics_service.rb +++ b/app/services/metrics_service.rb @@ -4,42 +4,93 @@ # see https://dropwizard.github.io/metrics/3.1.0/getting-started/ for abstractions on metric types class MetricsService - def self.record(description, service: nil, name: "unknown") + # rubocop:disable Metrics/AbcSize, Metrics/MethodLength + # :reek:LongParameterList + def self.record(description, service: nil, name: "unknown", caller: nil) return_value = nil app = RequestStore[:application] || "other" service ||= app + uuid = SecureRandom.uuid + metric_name = "request_latency" + sent_to = [[Metric::LOG_SYSTEMS[:rails_console]]] + sent_to_info = nil + start = Time.zone.now Rails.logger.info("STARTED #{description}") stopwatch = Benchmark.measure do return_value = yield end + stopped = Time.zone.now if service latency = stopwatch.real - DataDogService.emit_gauge( + sent_to_info = { metric_group: "service", - metric_name: "request_latency", + metric_name: metric_name, metric_value: latency, app_name: app, attrs: { service: service, - endpoint: name + endpoint: name, + uuid: uuid } - ) + } + DataDogService.emit_gauge(sent_to_info) + + sent_to << Metric::LOG_SYSTEMS[:datadog] end Rails.logger.info("FINISHED #{description}: #{stopwatch}") + + metric_params = { + name: metric_name, + message: description, + type: Metric::METRIC_TYPES[:performance], + product: service, + attrs: { + service: service, + 
endpoint: name + }, + sent_to: sent_to, + sent_to_info: sent_to_info, + start: start, + end: stopped, + duration: stopwatch.total * 1000 # values is in seconds and we want milliseconds + } + store_record_metric(uuid, metric_params, caller) + return_value rescue StandardError => error - Raven.capture_exception(error) + Rails.logger.error("#{error.message}\n#{error.backtrace.join("\n")}") + Raven.capture_exception(error, extra: { type: "request_error", service: service, name: name, app: app }) + increment_datadog_counter("request_error", service, name, app) if service + metric_params = { + name: "error", + message: error.message, + type: Metric::METRIC_TYPES[:error], + product: "", + attrs: { + service: "", + endpoint: "" + }, + sent_to: [[Metric::LOG_SYSTEMS[:rails_console]]], + sent_to_info: "", + start: "Time not recorded", + end: "Time not recorded", + duration: "Time not recorded" + } + + store_record_metric(uuid, metric_params, caller) + # Re-raise the same error. We don't want to interfere at all in normal error handling. # This is just to capture the metric. raise ensure increment_datadog_counter("request_attempt", service, name, app) if service end + # rubocop:enable Metrics/AbcSize, Metrics/MethodLength private_class_method def self.increment_datadog_counter(metric_name, service, endpoint_name, app_name) DataDogService.increment_counter( @@ -52,4 +103,27 @@ def self.record(description, service: nil, name: "unknown") } ) end + # :reek:ControlParameter + def self.store_record_metric(uuid, params, caller) + return nil unless FeatureToggle.enabled?(:metrics_monitoring, user: RequestStore[:current_user]) + + name = "caseflow.server.metric.#{params[:name]&.downcase&.gsub(/::/, '.')}" + params = { + uuid: uuid, + name: name, + message: params[:message], + type: params[:type], + product: params[:product], + metric_attributes: params[:attrs], + sent_to: params[:sent_to], + sent_to_info: params[:sent_to_info], + start: params[:start], + end: params[:end], + duration: params[:duration] + } + + metric = Metric.create_metric(caller || self, params, RequestStore[:current_user]) + failed_metric_info = metric&.errors.inspect + Rails.logger.info("Failed to create metric #{failed_metric_info}") unless metric&.valid? + end end diff --git a/app/services/stuck_job_report_service.rb b/app/services/stuck_job_report_service.rb new file mode 100644 index 00000000000..e6e03d28ab1 --- /dev/null +++ b/app/services/stuck_job_report_service.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +# StuckJobReportService is a generic shared class that creates the logs +# sent to S3. The logs give the count before the remediation and +# the count after the remediation. + +# The logs also contain the Id of the record that has been updated + +class StuckJobReportService + attr_reader :logs, :folder_name + + S3_FOLDER_NAME = "data-remediation-output" + + def initialize + @logs = ["#{Time.zone.now} ********** Remediation Log Report **********"] + @folder_name = (Rails.deploy_env == :prod) ? S3_FOLDER_NAME : "#{S3_FOLDER_NAME}-#{Rails.deploy_env}" + end + + # Logs the Id and the object that is being updated + def append_single_record(class_name, id) + logs.push("\n#{Time.zone.now} Record Type: #{class_name} - Record ID: #{id}.") + end + + def append_error(class_name, id, error) + logs.push("\n#{Time.zone.now} Record Type: #{class_name}"\ + " - Record ID: #{id}. Encountered #{error}, record not updated.") + end + + # Gets the record count of the record type passed in. 
+ def append_record_count(records_with_errors_count, text) + logs.push("\n#{Time.zone.now} #{text}::Log - Total number of Records with Errors: #{records_with_errors_count}") + end + + def write_log_report(report_text) + create_file_name = report_text.split.join("-").downcase + upload_logs(create_file_name) + end + + def upload_logs(create_file_name) + content = logs.join("\n") + file_name = "#{create_file_name}-logs/#{create_file_name}-log-#{Time.zone.now}" + S3Service.store_file("#{folder_name}/#{file_name}", content) + end +end diff --git a/app/views/certification_stats/show.html.erb b/app/views/certification_stats/show.html.erb deleted file mode 100644 index aa9038a49c0..00000000000 --- a/app/views/certification_stats/show.html.erb +++ /dev/null @@ -1,172 +0,0 @@ -<% content_for :page_title do stats_header end %> - -<% content_for :head do %> - <%= javascript_include_tag 'stats' %> - -<% end %> - -
- [deleted template body (markup not preserved): a "Certification Dashboard" page with links for each CertificationStats::INTERVALS interval and stat panels built from @stats[0].values. Activity: Certifications Started, Certifications Completed. Certification Rate: Overall (format_rate_stat(:same_period_completions, :certifications_started)) and Missing Document (format_rate_stat(:missing_doc_same_period_completions, :missing_doc)). Time to Certify: Overall and Missing Document, median and 95th percentile, via format_time_duration_stat. Missing Documents: Any Document, NOD, SOC, SSOC (rated against :ssoc_required), and Form 9, via format_rate_stat.]
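A minimal wiring sketch (not part of this diff) for the DeprecationWarnings handlers added earlier, which their comments describe as targets for `ActiveSupport::Deprecation.behavior=`; the initializer path and per-environment mapping below are assumptions for illustration only.

    # Hypothetical config/initializers/deprecation_warnings.rb (illustrative only).
    # ActiveSupport::Deprecation.behavior= accepts any object responding to
    # .call and .arity, which BaseHandler's class methods provide.
    handler =
      case Rails.env
      when "production" then DeprecationWarnings::ProductionHandler
      when "test"       then DeprecationWarnings::TestHandler
      else                   DeprecationWarnings::DevelopmentHandler
      end

    ActiveSupport::Deprecation.behavior = handler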
diff --git a/app/views/certifications/v2.html.erb b/app/views/certifications/v2.html.erb index 6b739da67b0..8634f07ea5d 100644 --- a/app/views/certifications/v2.html.erb +++ b/app/views/certifications/v2.html.erb @@ -4,6 +4,9 @@ dropdownUrls: dropdown_urls, feedbackUrl: feedback_url, buildDate: build_date, - vacolsId: @certification.vacols_id + vacolsId: @certification.vacols_id, + featureToggles: { + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) + } }) %> <% end %> diff --git a/app/views/decision_reviews/index.html.erb b/app/views/decision_reviews/index.html.erb index 548d1cdb9ad..8e276a690a0 100644 --- a/app/views/decision_reviews/index.html.erb +++ b/app/views/decision_reviews/index.html.erb @@ -10,10 +10,18 @@ businessLine: business_line.name, businessLineUrl: business_line.url, featureToggles: { - decisionReviewQueueSsnColumn: FeatureToggle.enabled?(:decision_review_queue_ssn_column, user: current_user) + decisionReviewQueueSsnColumn: FeatureToggle.enabled?(:decision_review_queue_ssn_column, user: current_user), + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) }, + poaAlert: {}, baseTasksUrl: business_line.tasks_url, + businessLineConfig: business_line_config_options, taskFilterDetails: task_filter_details + }, + ui: { + featureToggles: { + poa_button_refresh: FeatureToggle.enabled?(:poa_button_refresh) + } } }) %> <% end %> diff --git a/app/views/decision_reviews/show.html.erb b/app/views/decision_reviews/show.html.erb index 7edd5b90fc4..9e269261b89 100644 --- a/app/views/decision_reviews/show.html.erb +++ b/app/views/decision_reviews/show.html.erb @@ -8,12 +8,23 @@ serverNonComp: { businessLine: business_line.name, businessLineUrl: business_line.url, + businessLineConfig: business_line_config_options, baseTasksUrl: business_line.tasks_url, taskFilterDetails: task_filter_details, task: task.ui_hash, appeal: task.appeal_ui_hash, + poaAlert: {}, featureToggles: { decisionReviewQueueSsnColumn: FeatureToggle.enabled?(:decision_review_queue_ssn_column, user: current_user) + }, + loadingPowerOfAttorney: { + loading: false, + error: false + }, + ui: { + featureToggles: { + poa_button_refresh: FeatureToggle.enabled?(:poa_button_refresh) + } } } }) %> diff --git a/app/views/dispatch/establish_claims/index.html.erb b/app/views/dispatch/establish_claims/index.html.erb index 8a9cc4d7d64..3c8c256783a 100644 --- a/app/views/dispatch/establish_claims/index.html.erb +++ b/app/views/dispatch/establish_claims/index.html.erb @@ -8,6 +8,9 @@ buildDate: build_date, buttonText: start_text, userQuota: user_quota && user_quota.to_hash, - currentUserHistoricalTasks: current_user_historical_tasks.map(&:to_hash) + currentUserHistoricalTasks: current_user_historical_tasks.map(&:to_hash), + featureToggles: { + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) + } }) %> -<% end %> \ No newline at end of file +<% end %> diff --git a/app/views/dispatch_stats/show.html.erb b/app/views/dispatch_stats/show.html.erb deleted file mode 100644 index cec48c88821..00000000000 --- a/app/views/dispatch_stats/show.html.erb +++ /dev/null @@ -1,242 +0,0 @@ -<% content_for :page_title do stats_header end %> - -<% content_for :head do %> - <%= javascript_include_tag 'stats' %> - -<% end %> - -
- [deleted template body (markup not preserved): a "Dispatch Dashboard" page with links for each DispatchStats::INTERVALS interval and stat panels built from @stats[0].values. Establish Claim Tasks Identified from VACOLS: All, Full Grants, Partial Grants & Remands. Establish Claim Task Activity: Active Users, Tasks Started, Tasks Completed. Establish Claim Task Completion Rate: All, Full Grants, Partial Grants & Remands, via format_rate_stat. Time to Claim Establishment: median and 95th percentile for All, Full Grants, and Partial Grants & Remands, via format_time_duration_stat. Establish Claim Tasks Canceled: All, Full Grants, Partial Grants & Remands. Establish Claim Tasks with Decisions Uploaded to VBMS: All, Full Grants, Partial Grants & Remands.]
diff --git a/app/views/hearings/index.html.erb b/app/views/hearings/index.html.erb index b841733926f..32d74ab86a9 100644 --- a/app/views/hearings/index.html.erb +++ b/app/views/hearings/index.html.erb @@ -32,6 +32,9 @@ userIsDvc: current_user.can_view_judge_team_management?, userIsHearingManagement: current_user.in_hearing_management_team?, userIsBoardAttorney: current_user.attorney?, - userIsHearingAdmin: current_user.in_hearing_admin_team? + userIsHearingAdmin: current_user.in_hearing_admin_team?, + featureToggles: { + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) + } }) %> <% end %> diff --git a/app/views/inbox/index.html.erb b/app/views/inbox/index.html.erb index 65ba31a93e8..dba5d4f67ae 100644 --- a/app/views/inbox/index.html.erb +++ b/app/views/inbox/index.html.erb @@ -8,6 +8,9 @@ inbox: { messages: messages, pagination: pagination + }, + featureToggles: { + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) } }) %> <% end %> diff --git a/app/views/intake_manager/index.html.erb b/app/views/intake_manager/index.html.erb index 09ed9a2c07e..c575c5c01a9 100644 --- a/app/views/intake_manager/index.html.erb +++ b/app/views/intake_manager/index.html.erb @@ -4,6 +4,9 @@ selectedUser: user_css_id || "", dropdownUrls: dropdown_urls, feedbackUrl: feedback_url, - buildDate: build_date + buildDate: build_date, + featureToggles: { + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) + } }) %> <% end %> diff --git a/app/views/metrics/dashboard/show.html.erb b/app/views/metrics/dashboard/show.html.erb new file mode 100644 index 00000000000..32924ca5652 --- /dev/null +++ b/app/views/metrics/dashboard/show.html.erb @@ -0,0 +1,90 @@ + + + + + + +

+ [new template body (markup not preserved): a "Metrics Dashboard" heading, the note "Shows metrics created in past hour", and a table over @metrics with columns uuid, name, class, group, message, type, product, app, attributes, additional_info, sent_to, sent_to_info, relevant_tables_info, start, end, duration (ms), css_id, and created_at. Each row reads the matching attribute from the metric; note that the name, class, group, message, type, product, and app columns are backed by metric.metric_name, metric.metric_class, metric.metric_group, metric.metric_message, metric.metric_type, metric.metric_product, and metric.app_name.]
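A minimal caller-side sketch (not part of this diff) of the updated MetricsService.record API shown earlier, whose stored Metric records this dashboard displays when the :metrics_monitoring feature toggle is enabled; the description, service, endpoint name, and block body are illustrative placeholders.

    # Hypothetical caller: wraps an external call so its latency is emitted to
    # DataDog and, with :metrics_monitoring enabled, persisted as a Metric record.
    documents = MetricsService.record("VBMS: fetch documents for appeal",
                                      service: :vbms,
                                      name: "fetch_documents",
                                      caller: self) do
      fetch_documents_for_appeal # placeholder for the real work being timed
    end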
diff --git a/app/views/queue/index.html.erb b/app/views/queue/index.html.erb index a1eca8f69bb..27096a90071 100644 --- a/app/views/queue/index.html.erb +++ b/app/views/queue/index.html.erb @@ -57,7 +57,9 @@ legacy_mst_pact_identification: FeatureToggle.enabled?(:legacy_mst_pact_identification, user: current_user), justification_reason: FeatureToggle.enabled?(:justification_reason, user: current_user), cc_appeal_workflow: FeatureToggle.enabled?(:cc_appeal_workflow, user: current_user), - cc_vacatur_visibility: FeatureToggle.enabled?(:cc_vacatur_visibility, user: current_user) + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user), + cc_vacatur_visibility: FeatureToggle.enabled?(:cc_vacatur_visibility, user: current_user), + additional_remand_reasons: FeatureToggle.enabled?(:additional_remand_reasons, user: current_user) } }) %> <% end %> diff --git a/app/views/reader/appeal/index.html.erb b/app/views/reader/appeal/index.html.erb index f57c2c57ca4..251d6f2a559 100644 --- a/app/views/reader/appeal/index.html.erb +++ b/app/views/reader/appeal/index.html.erb @@ -8,11 +8,22 @@ page: "DecisionReviewer", feedbackUrl: feedback_url, efolderExpressUrl: efolder_express_url, + userHasEfolderRole: current_user.can?('Download eFolder'), featureToggles: { interfaceVersion2: FeatureToggle.enabled?(:interface_version_2, user: current_user), windowSlider: FeatureToggle.enabled?(:window_slider, user: current_user), readerSelectorsMemoized: FeatureToggle.enabled?(:bulk_upload_documents, user: current_user), - readerGetDocumentLogging: FeatureToggle.enabled?(:reader_get_document_logging, user: current_user) + readerGetDocumentLogging: FeatureToggle.enabled?(:reader_get_document_logging, user: current_user), + metricsLogRestError: FeatureToggle.enabled?(:metrics_log_rest_error, user: current_user), + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user), + metricsLoadScreen: FeatureToggle.enabled?(:metrics_load_screen, user: current_user), + metricsRecordPDFJSGetDocument: FeatureToggle.enabled?(:metrics_get_pdfjs_doc, user: current_user), + metricsReaderRenderText: FeatureToggle.enabled?(:metrics_reader_render_text, user: current_user), + metricsLogRestSuccess: FeatureToggle.enabled?(:metrics_log_rest_success, user: current_user), + metricsPdfStorePages: FeatureToggle.enabled?(:metrics_pdf_store_pages, user: current_user), + pdfPageRenderTimeInMs: FeatureToggle.enabled?(:pdf_page_render_time_in_ms, user: current_user), + prefetchDisabled: FeatureToggle.enabled?(:prefetch_disabled, user: current_user), + readerSearchImprovements: FeatureToggle.enabled?(:reader_search_improvements, user: current_user) }, buildDate: build_date }) %> diff --git a/app/views/stats/show.html.erb b/app/views/stats/show.html.erb deleted file mode 100644 index df678f57f43..00000000000 --- a/app/views/stats/show.html.erb +++ /dev/null @@ -1,11 +0,0 @@ -<% content_for :page_title do %>  >  Stats<% end %> - -<% content_for :full_page_content do %> - <%= react_component("StatsContainer", props: { - page: "StatsContainer", - userDisplayName: current_user.display_name, - dropdownUrls: dropdown_urls, - feedbackUrl: feedback_url, - buildDate: build_date - }) %> -<% end %> diff --git a/app/views/test/users/index.html.erb b/app/views/test/users/index.html.erb index f8a29402c45..3bb0dff6ff5 100644 --- a/app/views/test/users/index.html.erb +++ b/app/views/test/users/index.html.erb @@ -14,6 +14,10 @@ appSelectList: Test::UsersController::APPS, userSession: user_session, timezone: { 
getlocal: Time.now.getlocal.zone, zone: Time.zone.name }, - epTypes: ep_types + epTypes: ep_types, + featureToggles: { + interfaceVersion2: FeatureToggle.enabled?(:interface_version_2, user: current_user), + metricsBrowserError: FeatureToggle.enabled?(:metrics_browser_error, user: current_user) + } }) %> <% end %> diff --git a/app/workflows/contestable_issue_generator.rb b/app/workflows/contestable_issue_generator.rb index bc5f3534ddc..5dc40a9e926 100644 --- a/app/workflows/contestable_issue_generator.rb +++ b/app/workflows/contestable_issue_generator.rb @@ -1,9 +1,8 @@ # frozen_string_literal: true class ContestableIssueGenerator - def initialize(review, get_special_issues: false) + def initialize(review) @review = review - @get_special_issues = get_special_issues end delegate :finalized_decision_issues_before_receipt_date, to: :review @@ -82,11 +81,7 @@ def rating_decisions def rating_hash_deserialize(from:, to:) ratings.inject([]) do |result, rating_hash| - result + rating_hash[from].map do |hash| - # merge in special issues to the hash - hash = hash.merge(special_issues: to.deserialize_special_issues(hash)) if @get_special_issues - to.deserialize(hash) - end + result + rating_hash[from].map { |hash| to.deserialize(hash) } end end diff --git a/app/workflows/initial_tasks_factory.rb b/app/workflows/initial_tasks_factory.rb index 88b6cc66ffa..6c642bee735 100644 --- a/app/workflows/initial_tasks_factory.rb +++ b/app/workflows/initial_tasks_factory.rb @@ -21,6 +21,7 @@ def initialize(appeal) STATE_CODES_REQUIRING_TRANSLATION_TASK = %w[VI VQ PR PH RP PI].freeze + # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity def create_root_and_sub_tasks! # if changes to mst or pact, create IssueUpdateTask if (@appeal.mst? && FeatureToggle.enabled?(:mst_identification, user: RequestStore[:current_user])) || @@ -36,6 +37,7 @@ def create_root_and_sub_tasks! end maybe_create_translation_task end + # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity private @@ -48,7 +50,6 @@ def create_vso_tracking_tasks end # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def create_subtasks! distribution_task # ensure distribution_task exists if @appeal.appellant_substitution? @@ -72,7 +73,6 @@ def create_subtasks! end end # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity def distribution_task @distribution_task ||= @appeal.tasks.open.find_by(type: :DistributionTask) || @@ -89,13 +89,15 @@ def send_initial_notification_letter when "direct_review" parent_task = distribution_task end - @send_initial_notification_letter ||= @appeal.tasks.open.find_by(type: :SendInitialNotificationLetterTask) || - SendInitialNotificationLetterTask.create!( - appeal: @appeal, - parent: parent_task, - assigned_to: Organization.find_by_url("clerk-of-the-board"), - assigned_by: RequestStore[:current_user] - ) unless parent_task.nil? + unless parent_task.nil? 
+ @send_initial_notification_letter ||= @appeal.tasks.open.find_by(type: :SendInitialNotificationLetterTask) || + SendInitialNotificationLetterTask.create!( + appeal: @appeal, + parent: parent_task, + assigned_to: Organization.find_by_url("clerk-of-the-board"), + assigned_by: RequestStore[:current_user] + ) + end end def create_ihp_task diff --git a/bin/setup b/bin/setup index f4cd9a86308..8c5150054a6 100755 --- a/bin/setup +++ b/bin/setup @@ -2,6 +2,7 @@ # frozen_string_literal: true require "fileutils" +include FileUtils # rubocop:disable Style/MixinUsage # path to your application root. APP_ROOT = File.expand_path("..", __dir__) @@ -10,7 +11,7 @@ def system!(*args) system(*args) || abort("\n== Command #{args} failed ==") end -Dir.chdir APP_ROOT do +chdir APP_ROOT do # This script is a starting point to setup your application. # Add necessary setup steps to this file. diff --git a/bin/update b/bin/update index 74cb785c033..88bc67c037b 100755 --- a/bin/update +++ b/bin/update @@ -1,17 +1,17 @@ #!/usr/bin/env ruby # frozen_string_literal: true -require "pathname" require "fileutils" +include FileUtils # rubocop:disable Style/MixinUsage # path to your application root. -APP_ROOT = Pathname.new File.expand_path("..", __dir__) +APP_ROOT = File.expand_path("..", __dir__) def system!(*args) system(*args) || abort("\n== Command #{args} failed ==") end -Dir.chdir APP_ROOT do +chdir APP_ROOT do # This script is a way to update your development environment automatically. # Add necessary update steps to this file. @@ -19,6 +19,9 @@ Dir.chdir APP_ROOT do system! "gem install bundler --conservative" system("bundle check") || system!("bundle install") + # Install JavaScript dependencies if using Yarn + # system('bin/yarn') + puts "\n== Updating database ==" system! "bin/rails db:migrate" diff --git a/bin/yarn b/bin/yarn index 62ad0fa3828..49affd28ae3 100755 --- a/bin/yarn +++ b/bin/yarn @@ -1,11 +1,10 @@ #!/usr/bin/env ruby # frozen_string_literal: true - -VENDOR_PATH = File.expand_path("..", __dir__) -Dir.chdir(VENDOR_PATH) do - exec "yarnpkg #{ARGV.join(' ')}" +APP_ROOT = File.expand_path("..", __dir__) # rubocop:disable Layout/EmptyLineAfterMagicComment +Dir.chdir(APP_ROOT) do + exec "yarnpkg", *ARGV rescue Errno::ENOENT - warn "Yarn executable was not detected in the system." - warn "Download Yarn at https://yarnpkg.com/en/docs/install" + $stderr.puts "Yarn executable was not detected in the system." # rubocop:disable Style/StderrPuts + $stderr.puts "Download Yarn at https://yarnpkg.com/en/docs/install" # rubocop:disable Style/StderrPuts exit 1 end diff --git a/client/COPY.json b/client/COPY.json index 32d38616fe3..ea03c2b743f 100644 --- a/client/COPY.json +++ b/client/COPY.json @@ -59,6 +59,7 @@ "AGE_MAX_ERR": "Appellant cannot be older than 118 years of age." }, "SSN_INVALID_ERR": "Please enter a valid social security number that follows the format: 123-45-6789 or 123456789", + "EIN_INVALID_ERR": "Please enter a valid employer identification number that follows the format: 12-3456789 or 123456789", "NULL_FILTER_LABEL": "<>", "OTHER_REVIEWS_TABLE_TITLE": "Higher Level Reviews & Supplemental Claims", "OTHER_REVIEWS_TABLE_EP_CODE_COLUMN_TITLE": "EP Codes", @@ -130,9 +131,11 @@ "CASE_DETAILS_HEARING_ON_OTHER_APPEAL_LINK": "View all cases", "CASE_DETAILS_HEARING_ON_OTHER_APPEAL_POST_LINK": " to see other cases associated with this Veteran.", "CASE_DETAILS_UNRECOGNIZED_POA": "This POA is not listed in VBMS. 
To update this information, please submit an admin action to the VLJ Support team.", + "CASE_DETAILS_UNRECOGNIZED_POA_VHA": "This POA is not listed in VBMS.", "CASE_DETAILS_UNRECOGNIZED_APPELLANT": "This appellant is not listed in VBMS. To update this information, please edit directly in Caseflow.", "CASE_DETAILS_UNRECOGNIZED_ATTORNEY_APPELLANT": "This appellant data comes from VBMS. To edit this information, please submit an action to the VLJ Support team.", "CASE_DETAILS_NO_POA": "VA Form 21-22 was not received at Intake. To add the appellant's POA, please submit an admin action to the VLJ Support team.", + "CASE_DETAILS_NO_POA_VHA": "No known POA.", "CASE_DETAILS_VETERAN_ADDRESS_SOURCE": "Veteran information comes from VBMS. To update the veteran's information, please send a request to the VLJ support staff.", "CASE_DETAILS_UNABLE_TO_LOAD": "We're unable to load this information. If the problem persists, please submit feedback through Caseflow", "CASE_DETAILS_LOADING": "Loading...", @@ -141,6 +144,7 @@ "CASE_DETAILS_POA_ATTORNEY": "Private Attorney", "CASE_DETAILS_POA_LAST_SYNC_DATE_COPY": "POA last refreshed on %(poaSyncDate)s", "CASE_DETAILS_POA_EXPLAINER": "Power of Attorney (POA) data comes from VBMS. To update the POA information stored in VBMS, please send a task to the VLJ support management branch.", + "CASE_DETAILS_POA_EXPLAINER_VHA": "Power of Attorney (POA) data comes from VBMS.", "CASE_DETAILS_POA_SUBSTITUTE": "Appellant's Power of Attorney", "CASE_DETAILS_POA_REFRESH_BUTTON_EXPLANATION": "To retrieve the latest POA information, please click the \"Refresh POA\" button.", "CASE_DETAILS_EDIT_NOD_DATE_LINK_COPY": "Edit NOD Date", @@ -208,7 +212,6 @@ "CAVC_ALL_ISSUES_ERROR": "Please select all issues to proceed", "CAVC_FEDERAL_CIRCUIT_HEADER": "Notice of Appeal to the Federal Circuit", "CAVC_FEDERAL_CIRCUIT_LABEL": "Yes, this case has been appealed to the Federal Circuit", - "CAVC_INSTRUCTIONS_LABEL": "Provide context and instructions for this action", "CAVC_INSTRUCTIONS_ERROR": "Please provide context and instructions for the remand", "CAVC_REMAND_CREATED_TITLE": "You have successfully created a CAVC remand case", "CAVC_DASHBOARD_ENTRY_CREATED_TITLE": "You have successfully created an entry in the CAVC dashboard", @@ -360,7 +363,6 @@ "JUDGE_ADDRESS_MTV_SUCCESS_DETAIL_DENIED": "This task will be completed by the original motions attorney or placed in the team's queue", "RETURN_TO_LIT_SUPPORT_MODAL_TITLE": "Return to Litigation Support", "RETURN_TO_LIT_SUPPORT_MODAL_CONTENT": "Use this action to return the Motion to Vacate task to the previous Motions Attorney in order to request changes to the ruling letter draft, or if the ruling letter draft is missing.\n\nIf the previous attorney is inactive, this will return to the Litigation Support team queue for reassignment.", - "RETURN_TO_LIT_SUPPORT_MODAL_INSTRUCTIONS_LABEL": "Provide context and instructions for this action", "RETURN_TO_LIT_SUPPORT_MODAL_DEFAULT_INSTRUCTIONS": "I am missing a link to the draft ruling letter. 
Please resubmit so I can review and sign.", "RETURN_TO_LIT_SUPPORT_SUCCESS_TITLE": "%s's Motion to Vacate has been returned to Litigation Support", "RETURN_TO_LIT_SUPPORT_SUCCESS_DETAIL": "This task will be completed by the original Motions Attorney or placed in the team's queue", @@ -383,7 +385,6 @@ "MTV_CHECKOUT_RETURN_TO_JUDGE_ALERT_TITLE": "Prior decision issues marked for vacatur", "MTV_CHECKOUT_RETURN_TO_JUDGE_MODAL_TITLE": "Return to Judge", "MTV_CHECKOUT_RETURN_TO_JUDGE_MODAL_DESCRIPTION": "Use this action to return the Motion to Vacate task to your judge if you believe there is an error in the issues that have been marked for vacatur.\n\nIf your judge is unavailable, this will return to the Litigation Support team queue for reassignment.", - "MTV_CHECKOUT_RETURN_TO_JUDGE_MODAL_INSTRUCTIONS_LABEL": "Provide instructions and context for this action", "MTV_CHECKOUT_RETURN_TO_JUDGE_SUCCESS_TITLE": "%s's Motion to Vacate has been returned to %s", "MTV_CHECKOUT_RETURN_TO_JUDGE_SUCCESS_DETAILS": "If you made a mistake, please email your judge to resolve the issue.", @@ -597,7 +598,6 @@ "SCHEDULE_LATER_DISPLAY_TEXT": "Send to Schedule Veteran list", "ADD_COLOCATED_TASK_SUBHEAD": "Submit admin action", "ADD_COLOCATED_TASK_ACTION_TYPE_LABEL": "Select the type of administrative action you'd like to assign:", - "ADD_COLOCATED_TASK_INSTRUCTIONS_LABEL": "Provide instructions and context for this action", "ADD_COLOCATED_TASK_ANOTHER_BUTTON_LABEL": "+ Add another action", "ADD_COLOCATED_TASK_REMOVE_BUTTON_LABEL": "Remove this action", "ADD_COLOCATED_TASK_SUBMIT_BUTTON_LABEL": "Assign Action", @@ -667,6 +667,9 @@ "CREATE_MAIL_TASK_TITLE": "Create new mail task", "MAIL_TASK_DROPDOWN_TYPE_SELECTOR_LABEL": "Select correspondence type", "MAIL_TASK_CREATION_SUCCESS_MESSAGE": "Created %s task", + "EFOLDER_DOCUMENT_NOT_FOUND": "This document could not be found in eFolder.", + "EFOLDER_INVALID_LINK_FORMAT": "This link format is invalid.", + "EFOLDER_CONNECTION_ERROR": "Error contacting VBMS, please click 'Retry'", "SELF_ASSIGNED_MAIL_TASK_CREATION_SUCCESS_TITLE": "You have successfully created a new %s mail task", "SELF_ASSIGNED_MAIL_TASK_CREATION_SUCCESS_MESSAGE": "This task will remain in your Queue. Assign or take action on this at any time through your actions dropdown.", "CASE_TIMELINE_HEADER": "Below is the history of this case.", @@ -818,6 +821,7 @@ "EXTENSION_REQUEST_MAIL_TASK_LABEL": "Extension request", "FOIA_REQUEST_MAIL_TASK_LABEL": "FOIA request", "HEARING_RELATED_MAIL_TASK_LABEL": "Hearing-related", + "HEARING_POSTPONEMENT_REQUEST_MAIL_TASK_LABEL": "Hearing postponement request", "OTHER_MOTION_MAIL_TASK_LABEL": "Other motion", "POWER_OF_ATTORNEY_MAIL_TASK_LABEL": "Power of attorney-related", "PRIVACY_ACT_REQUEST_MAIL_TASK_LABEL": "Privacy act request", @@ -885,6 +889,7 @@ "CORRECT_REQUEST_ISSUES_LINK": "Correct issues", "CORRECT_REQUEST_ISSUES_WITHDRAW": "Withdraw", "CORRECT_REQUEST_ISSUES_SAVE": "Save", + "CORRECT_REQUEST_ISSUES_ESTABLISH": "Establish", "CORRECT_REQUEST_ISSUES_SPLIT_APPEAL": "Split appeal", "CORRECT_REQUEST_ISSUES_REMOVE_VBMS_TITLE": "Remove review?", "CORRECT_REQUEST_ISSUES_REMOVE_VBMS_TEXT": "This will remove the review and cancel all the End Products associated with it.", @@ -919,6 +924,7 @@ "ADD_CLAIMANT_CONFIRM_MODAL_TITLE": "Review and confirm claimant information", "ADD_CLAIMANT_CONFIRM_MODAL_DESCRIPTION": "Please review the claimant and their POA's information (if applicable) to ensure it matches the form(s). 
If you need to make edits, please click \"cancel and edit\" and make the edits accordingly.", "ADD_CLAIMANT_CONFIRM_MODAL_NO_POA": "Intake does not have a Form 21-22", + "VHA_NO_POA": "No known POA", "ADD_CLAIMANT_CONFIRM_MODAL_LAST_NAME_ALERT": "We noticed that you didn't enter a last name for the claimant. Are you sure they haven't included a last name?", "ADD_CLAIMANT_MODAL_TITLE": "Add Claimant", "ADD_CLAIMANT_MODAL_DESCRIPTION": "To add a claimant, select their relationship to the Veteran and type to search for their name. **Please note:** at this time, you are only able to add attorneys as claimants.\n\nIf you are unable to find the attorney in the list of names below, please cancel the intake and [email](mailto:VACaseflowIntake@va.gov) for assistance. Remember to encrypt any emails that contain PII.", @@ -927,6 +933,7 @@ "UPDATE_POA_PAGE_DESCRIPTION": "Add the appellant’s POA information based on the VA Form 21-22, so they can be notified of any correspondence sent to the claimant. If you are unable to find their name in the list of options, please select \"Name not listed\" and add their information accordingly.", "INTAKE_EDIT_WITHDRAW_DATE": "Please include the date the withdrawal was requested", "INTAKE_WITHDRAWN_BANNER": "This review will be withdrawn. You can intake these issues as a different type of decision review, if that was requested.", + "CLAIM_REVIEW_WITHDRAWN_MESSAGE": "You have successfully withdrawn a review.", "INTAKE_RATING_MAY_BE_PROCESS": "Rating may be in progress", "INTAKE_VETERAN_PAY_GRADE_INVALID": "Please check the Veteran's pay grade data in VBMS or SHARE to ensure all values are valid and try again.", "INTAKE_CONTENTION_HAS_EXAM_REQUESTED": "A medical exam is requested. Issue cannot be removed.", @@ -940,7 +947,7 @@ "INTAKE_REQUEST_ISSUE_UNTIMELY": "Please note: The issue requested isn't usually eligible because its decision date is older than what's allowed.", "INTAKE_LEGACY_ISSUE_UNTIMELY": "Please note: The legacy issue isn't eligible for SOC/SSOC opt-in unless an exemption has been requested for reasons related to good cause.", "INTAKE_REQUEST_ISSUE_AND_LEGACY_ISSUE_UNTIMELY": "Please note: The issue isn't usually eligible because its decision date is older than what is allowed, and the legacy issue issue isn't eligible for SOC/SSOC opt-in unless an exemption has been requested for reasons related to good cause.", - "INTAKE_ADD_EDIT_SPECIAL_ISSUES_LABEL": "Special issues: ", + "INTAKE_ADD_EDIT_SPECIAL_ISSUES_LABEL": "Special Issues: ", "INTAKE_EDIT_ISSUE_TITLE": "Edit issue", "INTAKE_EDIT_ISSUE_SELECT_SPECIAL_ISSUES": "Select any special issues that apply", "INTAKE_EDIT_ISSUE_CHANGE_MESSAGE": "Why was this change made?", @@ -969,6 +976,7 @@ "VHA_CAMO_PRE_DOCKET_INTAKE_SUCCESS_TITLE": "Appeal recorded and sent to VHA CAMO for document assessment", "VHA_CAREGIVER_SUPPORT_PRE_DOCKET_INTAKE_SUCCESS_TITLE": "Appeal recorded and sent to VHA Caregiver for document assessment", + "VHA_NO_DECISION_DATE_BANNER": "This claim will be saved, but cannot be worked on until a decision date is added to this issue.", "EDUCATION_PRE_DOCKET_INTAKE_SUCCESS_TITLE": "Appeal recorded and sent to Education Service for document assessment", "PRE_DOCKET_INTAKE_SUCCESS_TITLE": "Appeal recorded in pre-docket queue", "INTAKE_SUCCESS_TITLE": "Intake completed", @@ -1215,6 +1223,7 @@ "VHA_CAMO_ASSIGN_TO_REGIONAL_OFFICE_DROPDOWN_LABEL_VAMC": "VA Medical Center", "VHA_CAMO_ASSIGN_TO_REGIONAL_OFFICE_DROPDOWN_LABEL_VISN": "VISN", "VHA_CAREGIVER_LABEL": "CSP", + 
"VHA_INCOMPLETE_TAB_DESCRIPTION": "Cases that have been only saved and not yet established. Select the claimant name if you need to edit issues.", "EDUCATION_LABEL": "Education Service", "PRE_DOCKET_TASK_LABEL": "Pre-Docket", "DOCKET_APPEAL_MODAL_TITLE": "Docket appeal", @@ -1229,7 +1238,6 @@ "VHA_ASSIGN_TO_REGIONAL_OFFICE_MODAL_TITLE": "Assign to VAMC/VISN", "VHA_ASSIGN_TO_REGIONAL_OFFICE_RADIO_LABEL": "Find the VISN by:", "VHA_ASSIGN_TO_REGIONAL_OFFICE_INSTRUCTIONS_LABEL": "Provide additional context for this action", - "PRE_DOCKET_MODAL_BODY": "Provide instructions and context for this action", "VHA_PROGRAM_OFFICE_SELECTOR_PLACEHOLDER": "Select Program Office", "VHA_REGIONAL_OFFICE_SELECTOR_PLACEHOLDER": "Select VISN/VA Medical Center", "VHA_COMPLETE_TASK_MODAL_TITLE": "Where were documents regarding this appeal stored?", @@ -1396,5 +1404,12 @@ "DATE_SELECTOR_INVALID_DATE_ERROR": "Please select a valid date", "VHA_ACTION_PLACE_CUSTOM_HOLD_COPY": "Enter a custom number of days for the hold (Value must be between 1 and 45 for VHA users)", "VHA_CANCEL_TASK_INSTRUCTIONS_LABEL": "Why are you returning? Provide any important context", - "DISPOSITION_DECISION_DATE_LABEL": "Thank you for completing your decision in Caseflow. Please indicate the decision date." + "DISPOSITION_DECISION_DATE_LABEL": "Thank you for completing your decision in Caseflow. Please indicate the decision date.", + "PROVIDE_INSTRUCTIONS_AND_CONTEXT_LABEL": "Provide instructions and context for this action", + "VHA_ADD_DECISION_DATE_TO_ISSUE_SUCCESS_MESSAGE": "You have successfully updated an issue's decision date", + "NO_DATE_ENTERED": "No date entered", + "REFRESH_POA": "Refresh POA", + "POA_SUCCESSFULLY_REFRESH_MESSAGE": "Successfully refreshed. No power of attorney information was found at this time.", + "POA_UPDATED_SUCCESSFULLY": "POA Updated Successfully", + "EMPLOYER_IDENTIFICATION_NUMBER": "Employer Identification Number" } diff --git a/client/app/2.0/utils/reader/format.js b/client/app/2.0/utils/reader/format.js index 2856e171db0..e2e8ef1d663 100644 --- a/client/app/2.0/utils/reader/format.js +++ b/client/app/2.0/utils/reader/format.js @@ -24,6 +24,7 @@ export const formatFilterCriteria = (filterCriteria) => { category: Object.keys(filterCriteria.category).filter((cat) => filterCriteria.category[cat] === true). map((key) => formatCategoryName(key)), tag: Object.keys(filterCriteria.tag).filter((tag) => filterCriteria.tag[tag] === true), + docType: Object.keys(filterCriteria.docType), searchQuery: filterCriteria.searchQuery.toLowerCase() }; diff --git a/client/app/certification/ConfirmCaseDetails.jsx b/client/app/certification/ConfirmCaseDetails.jsx index 63b0f9d1644..3d1501b2545 100644 --- a/client/app/certification/ConfirmCaseDetails.jsx +++ b/client/app/certification/ConfirmCaseDetails.jsx @@ -122,19 +122,12 @@ const ERRORS = { */ export class ConfirmCaseDetails extends React.Component { - // TODO: updating state in UNSAFE_componentWillMount is - // sometimes thought of as an anti-pattern. - // is there a better way to do this? 
- // eslint-disable-next-line camelcase - UNSAFE_componentWillMount() { - this.props.updateProgressBar(); - } - componentWillUnmount() { this.props.resetState(); } componentDidMount() { + this.props.updateProgressBar(); window.scrollTo(0, 0); } diff --git a/client/app/certification/ConfirmHearing.jsx b/client/app/certification/ConfirmHearing.jsx index 99fa968d129..80c5ffb6884 100644 --- a/client/app/certification/ConfirmHearing.jsx +++ b/client/app/certification/ConfirmHearing.jsx @@ -122,18 +122,12 @@ const ERRORS = { // TODO: refactor to use shared components where helpful export class ConfirmHearing extends React.Component { - // TODO: updating state in UNSAFE_componentWillMount is - // sometimes thought of as an anti-pattern. - // is there a better way to do this? - UNSAFE_componentWillMount() { - this.props.updateProgressBar(); - } - componentWillUnmount() { this.props.resetState(); } componentDidMount() { + this.props.updateProgressBar(); window.scrollTo(0, 0); } @@ -429,5 +423,12 @@ ConfirmHearing.propTypes = { hearingPreference: PropTypes.string, onHearingPreferenceChange: PropTypes.func, match: PropTypes.object.isRequired, - certificationStatus: PropTypes.string + certificationStatus: PropTypes.string, + resetState: PropTypes.func, + updateProgressBar: PropTypes.func, + showValidationErrors: PropTypes.func, + certificationUpdateStart: PropTypes.func, + loading: PropTypes.bool, + serverError: PropTypes.bool, + updateConfirmHearingSucceeded: PropTypes.func }; diff --git a/client/app/certification/DocumentsCheck.jsx b/client/app/certification/DocumentsCheck.jsx index d74b3b93f5a..0cec2fae1d3 100644 --- a/client/app/certification/DocumentsCheck.jsx +++ b/client/app/certification/DocumentsCheck.jsx @@ -15,11 +15,7 @@ import CertificationProgressBar from './CertificationProgressBar'; import WindowUtil from '../util/WindowUtil'; export class DocumentsCheck extends React.Component { - // TODO: updating state in UNSAFE_componentWillMount is - // sometimes thought of as an anti-pattern. - // is there a better way to do this? - // eslint-disable-next-line camelcase - UNSAFE_componentWillMount() { + componentDidMount() { this.props.updateProgressBar(); } @@ -57,13 +53,13 @@ export class DocumentsCheck extends React.Component {

[DocumentsCheck help copy (surrounding JSX markup not preserved). Recoverable text, in order: "If the document status is marked with an [icon], try checking:" / "... labeled correctly." / "The document date in VBMS. NOD and Form 9 dates must match their VACOLS dates. SOC and SSOC dates are considered matching if the VBMS date is the same as the VACOLS date, or if the VBMS date is 4 days or fewer before the VACOLS date. Learn more about document dates." / "Once you've made corrections, refresh this page." / "If you can't find the document, cancel this certification." The hunk appears to adjust only the markup around "labeled correctly." and "refresh this page."]
; diff --git a/client/app/certification/SignAndCertify.jsx b/client/app/certification/SignAndCertify.jsx index cef885d12ca..894e42bcf9b 100644 --- a/client/app/certification/SignAndCertify.jsx +++ b/client/app/certification/SignAndCertify.jsx @@ -41,15 +41,9 @@ const ERRORS = { }; export class SignAndCertify extends React.Component { - // TODO: updating state in UNSAFE_componentWillMount is - // sometimes thought of as an anti-pattern. - // is there a better way to do this? - UNSAFE_componentWillMount() { - this.props.updateProgressBar(); - } - /* eslint class-methods-use-this: ["error", { "exceptMethods": ["componentDidMount"] }] */ componentDidMount() { + this.props.updateProgressBar(); window.scrollTo(0, 0); } @@ -267,5 +261,11 @@ SignAndCertify.propTypes = { erroredFields: PropTypes.array, scrollToError: PropTypes.bool, match: PropTypes.object.isRequired, - certificationStatus: PropTypes.string + certificationStatus: PropTypes.string, + updateProgressBar: PropTypes.func, + showValidationErrors: PropTypes.func, + certificationUpdateStart: PropTypes.func, + loading: PropTypes.bool, + serverError: PropTypes.bool, + updateSucceeded: PropTypes.bool }; diff --git a/client/app/components/Alert.jsx b/client/app/components/Alert.jsx index 2d851edb3a8..0fcc4da8983 100644 --- a/client/app/components/Alert.jsx +++ b/client/app/components/Alert.jsx @@ -13,7 +13,7 @@ export default class Alert extends React.Component { messageDiv() { const message = this.props.children || this.props.message; - return
[messageDiv() return markup not preserved; both the old and new versions wrap {message}, and the messageStyling propType added below suggests the revision applies messageStyling to that wrapper]
; } render() { @@ -56,6 +56,7 @@ Alert.propTypes = { */ lowerMargin: PropTypes.bool, message: PropTypes.node, + messageStyling: PropTypes.oneOfType([PropTypes.string, PropTypes.object]), /** * If empty, a "slim" alert is displayed diff --git a/client/app/components/AmaIssueList.jsx b/client/app/components/AmaIssueList.jsx index 188e14c5c47..7a509e63f52 100644 --- a/client/app/components/AmaIssueList.jsx +++ b/client/app/components/AmaIssueList.jsx @@ -82,7 +82,7 @@ export default class AmaIssueList extends React.PureComponent { {requestIssues.map((issue, i) => { const error = errorMessages && errorMessages[issue.id]; - return + return { error && {error} diff --git a/client/app/components/DateSelector.jsx b/client/app/components/DateSelector.jsx index 368e753cc4c..f692a209cbf 100644 --- a/client/app/components/DateSelector.jsx +++ b/client/app/components/DateSelector.jsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; import PropTypes from 'prop-types'; import TextField from '../components/TextField'; import ValidatorsUtil from '../util/ValidatorsUtil'; @@ -7,6 +7,8 @@ import COPY from '../../COPY'; const DEFAULT_TEXT = 'mm/dd/yyyy'; export const DateSelector = (props) => { + const [dateError, setDateError] = useState(null); + const { dateValidator, futureDate } = ValidatorsUtil; const { @@ -21,6 +23,8 @@ export const DateSelector = (props) => { value, dateErrorMessage, noFutureDates = false, + inputStyling, + validateDate, ...passthroughProps } = props; @@ -42,6 +46,13 @@ export const DateSelector = (props) => { return null; }; + useEffect(() => { + const errorMsg = dateValidationError(value); + + setDateError(errorMsg); + validateDate?.(value !== '' && errorMsg === null); + }, [value]); + let max = '9999-12-31'; if (noFutureDates) { @@ -57,13 +68,14 @@ export const DateSelector = (props) => { readOnly={readOnly} type={type} value={value} - validationError={dateValidationError(value)} + validationError={dateError} onChange={onChange} placeholder={DEFAULT_TEXT} required={required} {...passthroughProps} max={max} dateErrorMessage={dateErrorMessage} + inputStyling={inputStyling} /> ); }; @@ -74,7 +86,7 @@ DateSelector.propTypes = { * The initial value of the `input` element; use for uncontrolled components where not using `value` prop */ defaultValue: PropTypes.string, - + inputStyling: PropTypes.object, dateErrorMessage: PropTypes.string, /** @@ -133,12 +145,17 @@ DateSelector.propTypes = { /** * The value of the `input` element; required for a controlled component */ - value: PropTypes.string, + value: PropTypes.oneOfType([PropTypes.string, PropTypes.bool]), /** * Disables future dates from being selected or entered */ - noFutureDates: PropTypes.bool + noFutureDates: PropTypes.bool, + + /** + * Disables form submission if date is empty or invalid + */ + validateDate: PropTypes.func }; export default DateSelector; diff --git a/client/app/components/FlowModal.jsx b/client/app/components/FlowModal.jsx index 5c518d46db7..99ce9a2ddb0 100644 --- a/client/app/components/FlowModal.jsx +++ b/client/app/components/FlowModal.jsx @@ -84,7 +84,7 @@ export default class FlowModal extends React.PureComponent { FlowModal.defaultProps = { button: COPY.MODAL_SUBMIT_BUTTON, - submitButtonClassNames: ['usa-button-secondary', 'usa-button-hover', 'usa-button-warning'], + submitButtonClassNames: ['usa-button', 'usa-button-hover', 'usa-button-warning'], pathAfterSubmit: '/queue', submitDisabled: false, title: '', diff --git 
a/client/app/components/LoadingDataDisplay.jsx b/client/app/components/LoadingDataDisplay.jsx index 449e54a0e61..f9d63b59081 100644 --- a/client/app/components/LoadingDataDisplay.jsx +++ b/client/app/components/LoadingDataDisplay.jsx @@ -4,6 +4,7 @@ import PropTypes from 'prop-types'; import LoadingScreen from './LoadingScreen'; import StatusMessage from './StatusMessage'; import COPY from '../../COPY'; +import { recordAsyncMetrics } from '../util/Metrics'; const PROMISE_RESULTS = { SUCCESS: 'SUCCESS', @@ -42,10 +43,23 @@ class LoadingDataDisplay extends React.PureComponent { this.setState({ promiseStartTimeMs: Date.now() }); + const metricData = { + message: this.props.loadingComponentProps?.message || 'loading screen', + type: 'performance', + data: { + failStatusMessageProps: this.props.failStatusMessageProps, + loadingComponentProps: this.props.loadingComponentProps, + slowLoadMessage: this.props.slowLoadMessage, + slowLoadThresholdMs: this.props.slowLoadThresholdMs, + timeoutMs: this.props.timeoutMs, + prefetchDisabled: this.props.prefetchDisabled + } + }; + // Promise does not give us a way to "un-then" and stop listening // when the component unmounts. So we'll leave this reference dangling, // but at least we can use this._isMounted to avoid taking action if necessary. - promise.then( + recordAsyncMetrics(promise, metricData, this.props.metricsLoadScreen).then( () => { if (!this._isMounted) { return; @@ -93,9 +107,8 @@ class LoadingDataDisplay extends React.PureComponent { this._isMounted = false; } - // eslint-disable-next-line camelcase - UNSAFE_componentWillReceiveProps(nextProps) { - if (this.props.createLoadPromise.toString() !== nextProps.createLoadPromise.toString()) { + componentDidUpdate(prevProps) { + if (this.props.createLoadPromise.toString() !== prevProps.createLoadPromise.toString()) { throw new Error("Once LoadingDataDisplay is instantiated, you can't change the createLoadPromise function."); } } @@ -162,7 +175,9 @@ LoadingDataDisplay.propTypes = { loadingComponentProps: PropTypes.object, slowLoadMessage: PropTypes.string, slowLoadThresholdMs: PropTypes.number, - timeoutMs: PropTypes.number + timeoutMs: PropTypes.number, + metricsLoadScreen: PropTypes.bool, + prefetchDisabled: PropTypes.bool, }; LoadingDataDisplay.defaultProps = { @@ -173,7 +188,8 @@ LoadingDataDisplay.defaultProps = { errorComponent: StatusMessage, loadingComponentProps: {}, failStatusMessageProps: {}, - failStatusMessageChildren: DEFAULT_UNKNOWN_ERROR_MSG + failStatusMessageChildren: DEFAULT_UNKNOWN_ERROR_MSG, + metricsLoadScreen: false, }; export default LoadingDataDisplay; diff --git a/client/app/components/PageRoute.jsx b/client/app/components/PageRoute.jsx index a9646eaf574..4739b2b1f23 100644 --- a/client/app/components/PageRoute.jsx +++ b/client/app/components/PageRoute.jsx @@ -31,7 +31,10 @@ const PageRoute = (props) => { // Render the Loading Screen while the default route props are loading return loading ? 
- : + : ; }; diff --git a/client/app/components/RadioField.jsx b/client/app/components/RadioField.jsx index eb6b5cf52f9..1880ba7c00e 100644 --- a/client/app/components/RadioField.jsx +++ b/client/app/components/RadioField.jsx @@ -42,7 +42,8 @@ export const RadioField = (props) => { strongLabel, hideLabel, styling, - vertical + vertical, + optionsStyling } = props; const isVertical = useMemo(() => props.vertical || props.options.length > 2, [ @@ -67,11 +68,11 @@ export const RadioField = (props) => { const maybeAddTooltip = (option, radioField) => { if (option.tooltipText) { - const idKey = `tooltip-${option.value}`; + const keyId = `tooltip-${option.value}`; return { {errorMessage} )} -
+
{options.map((option, i) => { const optionDisabled = isDisabled(option); const radioField = (
{ key={`${idPart}-${option.value}-${i}`} > ({ + width: '80%' + }), + + menu: () => ({ + border: '1px solid black', + }), + + valueContainer: (styles) => ({ + + ...styles, + lineHeight: 'normal', + // this is a hack to fix a problem with changing the height of the dropdown component. + // Changing the height causes problems with text shifting. + marginTop: '-10%', + marginBottom: '-10%', + paddingTop: '-10%', + minHeight: '44px', + + }), + singleValue: (styles) => { + return { + ...styles, + alignContent: 'center', + }; + }, + + placeholder: (styles) => ({ + ...styles + }), + + option: (styles, { isFocused }) => ({ + color: 'black', + alignContent: 'center', + backgroundColor: isFocused ? 'white' : 'null', + ':hover': { + ...styles[':hover'], + backgroundColor: '#5c9ceb', + color: 'white' + } + }) +}; + +const selectContainerStyles = css({ + width: '100%', + display: 'inline-block' +}); + +const ReactSelectDropdown = (props) => { + return ( +
+ [JSX markup not preserved: the ReactSelectDropdown component's return block, followed by a TextField block that appears to render only while `loading` is true]
)} {validationError && ( @@ -127,6 +149,7 @@ TextField.propTypes = { defaultValue: PropTypes.oneOfType([PropTypes.string, PropTypes.number]), errorMessage: PropTypes.string, className: PropTypes.arrayOf(PropTypes.string), + id: PropTypes.string, inputStyling: PropTypes.object, /** @@ -181,6 +204,7 @@ TextField.propTypes = { optional: PropTypes.bool.isRequired, type: PropTypes.string, validationError: PropTypes.string, + loading: PropTypes.bool, /** * The value of the `input` element; required for a controlled component diff --git a/client/app/components/badges/MstBadge/__snapshots__/MstBadge.test.js.snap b/client/app/components/badges/MstBadge/__snapshots__/MstBadge.test.js.snap index 2d1fdb4ab01..99038478f4f 100644 --- a/client/app/components/badges/MstBadge/__snapshots__/MstBadge.test.js.snap +++ b/client/app/components/badges/MstBadge/__snapshots__/MstBadge.test.js.snap @@ -21,7 +21,7 @@ exports[`MstBadge renders correctly 1`] = ` > { // if the callee only passed a number, append 'px' if (!(/\D/).test(imgSize)) { imgSize += 'px'; - console.warn( - 'LoadingIcon() size argument', size, 'converted to', imgSize - ); } const style = { marginLeft: `-${imgSize}` }; diff --git a/client/app/containers/EstablishClaimPage/EstablishClaimAssociateEP.jsx b/client/app/containers/EstablishClaimPage/EstablishClaimAssociateEP.jsx index af18f7ed6da..900576d0eb8 100644 --- a/client/app/containers/EstablishClaimPage/EstablishClaimAssociateEP.jsx +++ b/client/app/containers/EstablishClaimPage/EstablishClaimAssociateEP.jsx @@ -21,8 +21,7 @@ export class AssociatePage extends React.Component { }; } - // eslint-disable-next-line camelcase - UNSAFE_componentWillMount() { + componentDidMount() { if (!this.props.endProducts.length) { this.props.history.goBack(); } diff --git a/client/app/containers/stats/StatsContainer.jsx b/client/app/containers/stats/StatsContainer.jsx deleted file mode 100644 index f18fc60ebab..00000000000 --- a/client/app/containers/stats/StatsContainer.jsx +++ /dev/null @@ -1,49 +0,0 @@ -import React from 'react'; -import AppFrame from '../../components/AppFrame'; -import AppSegment from '@department-of-veterans-affairs/caseflow-frontend-toolkit/components/AppSegment'; -import NavigationBar from '../../components/NavigationBar'; -import Footer from '@department-of-veterans-affairs/caseflow-frontend-toolkit/components/Footer'; -import { COLORS } from '@department-of-veterans-affairs/caseflow-frontend-toolkit/util/StyleConstants'; -import { BrowserRouter } from 'react-router-dom'; -import PropTypes from 'prop-types'; - -const StatsContainer = (props) => - - - - -

Caseflow Stats

- - -
-
-