diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml
index ac0ee3308c..b5ff9aa391 100644
--- a/.github/workflows/build-linux.yml
+++ b/.github/workflows/build-linux.yml
@@ -35,6 +35,9 @@ on:
       extras:
         type: boolean
         default: false
+      tests:
+        type: boolean
+        default: false
       gcc-ver:
         type: string
         default: "10"
@@ -112,6 +115,7 @@ jobs:
           -DBUILD_SIZECHECK:BOOL=${{ inputs.extras }} \
           -DBUILD_SKELETON:BOOL=${{ inputs.extras }} \
           -DBUILD_DOCS:BOOL=${{ inputs.docs }} \
+          -DBUILD_TESTS:BOOL=${{ inputs.tests }} \
           -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} \
           -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }}
     - name: Build DFHack
diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml
index acd7db7ce9..16a947fb4e 100644
--- a/.github/workflows/build-windows.yml
+++ b/.github/workflows/build-windows.yml
@@ -5,6 +5,8 @@ on:
     inputs:
       dfhack_ref:
         type: string
+      scripts_ref:
+        type: string
       structures_ref:
         type: string
       artifact-name:
@@ -21,6 +23,15 @@ on:
       common-files:
         type: boolean
         default: true
+      docs:
+        type: boolean
+        default: false
+      stonesense:
+        type: boolean
+        default: false
+      tests:
+        type: boolean
+        default: false
       launchdf:
         type: boolean
         default: false
@@ -40,6 +51,13 @@ jobs:
        ref: ${{ inputs.dfhack_ref }}
        submodules: true
        fetch-depth: 0
+    - name: Clone scripts
+      if: inputs.scripts_ref
+      uses: actions/checkout@v3
+      with:
+        repository: 'DFHack/scripts'
+        ref: ${{ inputs.scripts_ref }}
+        path: scripts
     - name: Clone structures
       if: inputs.structures_ref
       uses: actions/checkout@v3
       with:
@@ -65,14 +83,14 @@ jobs:
          win-msvc
     - name: Cross-compile
       env:
-        CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=1 -DBUILD_DOCS:BOOL=${{ inputs.common-files }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }}'
+        CMAKE_EXTRA_ARGS: '-DBUILD_STONESENSE:BOOL=${{ inputs.stonesense }} -DBUILD_DOCS:BOOL=${{ inputs.docs }} -DINSTALL_DATA_FILES:BOOL=${{ inputs.common-files }} -DINSTALL_SCRIPTS:BOOL=${{ inputs.common-files }} -DBUILD_DFLAUNCH:BOOL=${{ inputs.launchdf }} -DBUILD_TESTS:BOOL=${{ inputs.tests }}'
       run: |
         cd build
         bash -x build-win64-from-linux.sh
     - name: Trim cache
       run: |
         cd build
-        ccache -d win64-cross/ccache --max-size 200M
+        ccache -d win64-cross/ccache --max-size 150M
         ccache -d win64-cross/ccache --cleanup
         ccache -d win64-cross/ccache --show-stats --verbose
     - name: Save ccache
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4227c3f04d..051fc55b73 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -25,6 +25,8 @@ jobs:
       artifact-name: dfhack-win64-build
       append-date-and-hash: true
       cache-id: release
+      stonesense: true
+      docs: true
     secrets: inherit

   docs:
diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml
index 6922921253..56bc267025 100644
--- a/.github/workflows/github-release.yml
+++ b/.github/workflows/github-release.yml
@@ -20,6 +20,8 @@ jobs:
       dfhack_ref: ${{ github.event.inputs && github.event.inputs.ref || github.event.ref }}
       cache-id: release
       cache-readonly: true
+      stonesense: true
+      docs: true
       launchdf: true
     secrets: inherit

diff --git a/.github/workflows/steam-deploy.yml b/.github/workflows/steam-deploy.yml
index a44c0295fb..5b14d0bc89 100644
--- a/.github/workflows/steam-deploy.yml
+++ b/.github/workflows/steam-deploy.yml
@@ -43,6 +43,7 @@ jobs:
       cache-id: release
       cache-readonly: true
       common-files: false
+      stonesense: true
       launchdf: true
     secrets: inherit

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0fb019abf1..17a0872fc5 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,6 +11,17 @@ on:
         type: string

 jobs:
+  build-windows:
+    name: Windows MSVC
+    uses: ./.github/workflows/build-windows.yml
+    with:
+      dfhack_ref: ${{ inputs.dfhack_ref }}
+      scripts_ref: ${{ inputs.scripts_ref }}
+      structures_ref: ${{ inputs.structures_ref }}
+      artifact-name: test-msvc
+      cache-id: test
+      tests: true
+
   build-linux:
     name: Linux gcc-${{ matrix.gcc }}
     uses: ./.github/workflows/build-linux.yml
@@ -22,6 +33,7 @@ jobs:
       cache-id: test
       stonesense: ${{ matrix.plugins == 'all' }}
       extras: ${{ matrix.plugins == 'all' }}
+      tests: true
       gcc-ver: ${{ matrix.gcc }}
     secrets: inherit
     strategy:
@@ -33,51 +45,78 @@ jobs:
           - gcc: 12
             plugins: "all"

-  test-linux:
-    name: Test (Linux, GCC ${{ matrix.gcc }}, ${{ matrix.plugins }} plugins)
-    needs: build-linux
-    runs-on: ubuntu-latest
+  test-windows:
+    name: Test (${{ matrix.os }}, ${{ matrix.compiler }}, ${{ matrix.plugins }} plugins)
+    needs:
+      - build-windows
+      - build-linux
+    runs-on: ${{ matrix.os }}-latest
     strategy:
       fail-fast: false
       matrix:
         include:
-          - gcc: 10
+          - os: windows
+            compiler: msvc
             plugins: "default"
-          - gcc: 12
-            plugins: "all"
+          # TODO: uncomment once we have a linux build we can download from bay12
+          # - os: ubuntu
+          #   compiler: gcc-10
+          #   gcc: 10
+          #   plugins: "default"
+          # - os: ubuntu
+          #   compiler: gcc-12
+          #   gcc: 12
+          #   plugins: "all"
     steps:
-      - name: Download artifact
+      - name: Set env (windows)
+        if: matrix.os == 'windows'
+        run: echo "DF_FOLDER=DF" >> $env:GITHUB_ENV
+      - name: Set env (posix)
+        if: matrix.os != 'windows'
+        run: echo "DF_FOLDER=DF" >> $GITHUB_ENV
+      - name: Clone DFHack
+        uses: actions/checkout@v3
+        with:
+          repository: 'DFHack/dfhack'
+          ref: ${{ inputs.dfhack_ref }}
+      - name: Detect DF version (windows)
+        if: matrix.os == 'windows'
+        run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $env:GITHUB_ENV
+      - name: Detect DF version (posix)
+        if: matrix.os != 'windows'
+        run: echo DF_VERSION="$(sh ci/get-df-version.sh)" >> $GITHUB_ENV
+      - name: Fetch DF cache
+        id: restore-df
+        uses: actions/cache/restore@v3
+        with:
+          path: ${{ env.DF_FOLDER }}
+          key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
+      - name: Download DF
+        if: steps.restore-df.outputs.cache-hit != 'true'
+        run: sh ci/download-df.sh ${{ env.DF_FOLDER }} ${{ matrix.os }} ${{ env.DF_VERSION }}
+      - name: Save DF cache
+        if: steps.restore-df.outputs.cache-hit != 'true'
+        uses: actions/cache/save@v3
+        with:
+          path: ${{ env.DF_FOLDER }}
+          key: df-${{ matrix.os }}-${{ env.DF_VERSION }}-${{ hashFiles('ci/download-df.sh') }}
+      - name: Download DFHack
         uses: actions/download-artifact@v3
         with:
-          name: test-gcc-${{ matrix.gcc }}
-      # - name: Fetch DF cache
-      #   uses: actions/cache@v3
-      #   with:
-      #     path: ~/DF
-      #     key: df-${{ hashFiles('ci/download-df.sh') }}
-      # - name: Download DF
-      #   run: |
-      #     sh ci/download-df.sh
-      # - name: Run lua tests
-      #   id: run_tests_lua
-      #   run: |
-      #     export TERM=dumb
-      #     status=0
-      #     script -qe -c "python ci/run-tests.py --headless --keep-status \"$DF_FOLDER\"" || status=$((status + 1))
-      #     python ci/check-rpc.py "$DF_FOLDER/dfhack-rpc.txt" || status=$((status + 2))
-      #     mkdir -p artifacts
-      #     cp "$DF_FOLDER"/test*.json "$DF_FOLDER"/*.log artifacts || status=$((status + 4))
-      #     exit $status
-      # - name: Upload test artifacts
-      #   uses: actions/upload-artifact@v3
-      #   if: (success() || failure()) && steps.run_tests.outcome != 'skipped'
-      #   continue-on-error: true
-      #   with:
-      #     name: test-artifacts-${{ matrix.gcc }}
-      #     path: artifacts
-      # - name: Clean up DF folder
-      #   # prevent DFHack-generated files from ending up in the cache
-      #   # (download-df.sh also removes them, this is just to save cache space)
-      #   if: success() || failure()
-      #   run: |
-      #     rm -rf "$DF_FOLDER"
+          name: test-${{ matrix.compiler }}
+          path: ${{ env.DF_FOLDER }}
+      - name: Run lua tests
+        timeout-minutes: 10
+        run: python ci/run-tests.py --keep-status "${{ env.DF_FOLDER }}"
+      - name: Check RPC interface
+        run: python ci/check-rpc.py "${{ env.DF_FOLDER }}/dfhack-rpc.txt"
+      - name: Upload test artifacts
+        uses: actions/upload-artifact@v3
+        if: always()
+        continue-on-error: true
+        with:
+          name: test-artifacts-msvc
+          path: |
+            ${{ env.DF_FOLDER }}/dfhack-rpc.txt
+            ${{ env.DF_FOLDER }}/test*.json
+            ${{ env.DF_FOLDER }}/*.log
diff --git a/ci/check-rpc.py b/ci/check-rpc.py
index aba3e38115..be7d079868 100755
--- a/ci/check-rpc.py
+++ b/ci/check-rpc.py
@@ -1,8 +1,10 @@
 #!/usr/bin/env python3
 import glob
+import itertools
 import sys

 actual = {'': {}}
+SEP = ('=' * 80)

 with open(sys.argv[1]) as f:
     plugin_name = ''
@@ -26,7 +28,7 @@
             parts = line.split(' ')
             expected[''][parts[2]] = (parts[4], parts[6])

-for p in glob.iglob('plugins/proto/*.proto'):
+for p in itertools.chain(glob.iglob('plugins/proto/*.proto'), glob.iglob('plugins/*/proto/*.proto')):
     plugin_name = ''
     with open(p) as f:
         for line in f:
@@ -53,6 +55,7 @@
     methods = actual[plugin_name]

     if plugin_name not in expected:
+        print(SEP)
         print('Missing documentation for plugin proto files: ' + plugin_name)
         print('Add the following lines:')
         print('// Plugin: ' + plugin_name)
@@ -73,12 +76,14 @@
                 missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

         if len(missing) > 0:
+            print(SEP)
             print('Incomplete documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Add the following lines:')
             for m in missing:
                 print(m)
             error_count += 1

         if len(wrong) > 0:
+            print(SEP)
             print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Replace the following comments:')
             for m in wrong:
                 print(m)
@@ -88,6 +93,7 @@
     methods = expected[plugin_name]

     if plugin_name not in actual:
+        print(SEP)
         print('Incorrect documentation for plugin proto files: ' + plugin_name)
         print('The following methods are documented, but the plugin does not provide any RPC methods:')
         for m in methods:
@@ -102,6 +108,7 @@
             missing.append('// RPC ' + m + ' : ' + io[0] + ' -> ' + io[1])

     if len(missing) > 0:
+        print(SEP)
         print('Incorrect documentation for ' + ('core' if plugin_name == '' else 'plugin "' + plugin_name + '"') + ' proto files. Remove the following lines:')
         for m in missing:
             print(m)
diff --git a/ci/download-df.sh b/ci/download-df.sh
index 8d94d54ddd..12e9a41e34 100755
--- a/ci/download-df.sh
+++ b/ci/download-df.sh
@@ -1,52 +1,50 @@
 #!/bin/sh

+DF_FOLDER=$1
+OS_TARGET=$2
+DF_VERSION=$3
+
 set -e

-df_tardest="df.tar.bz2"
-save_tardest="test_save.tgz"
-
-cd "$(dirname "$0")"
-echo "DF_VERSION: $DF_VERSION"
-echo "DF_FOLDER: $DF_FOLDER"
-mkdir -p "$DF_FOLDER"
-# back out of df_linux
-cd "$DF_FOLDER/.."
-
-if ! test -f "$df_tardest"; then
-    minor=$(echo "$DF_VERSION" | cut -d. -f2)
-    patch=$(echo "$DF_VERSION" | cut -d. -f3)
-    echo "Downloading DF $DF_VERSION"
-    while read url; do
-        echo "Attempting download: ${url}"
-        if wget -v "$url" -O "$df_tardest"; then
-            break
-        fi
-    done <
 If a done_command is specified, it will be run after the tests complete.

-Options:
-
-    -h, --help      display this help message and exit.
-    -d, --test_dir  specifies which directory to look in for tests. defaults to
-                    the "hack/scripts/test" folder in your DF installation.
-    -m, --modes     only run tests in the given comma separated list of modes.
-                    see the next section for a list of valid modes. if not
-                    specified, the tests are not filtered by modes.
-    -r, --resume    skip tests that have already been run. remove the
-                    test_status.json file to reset the record.
-    -s, --save_dir  the save folder to load for "fortress" mode tests. this
-                    save is only loaded if a fort is not already loaded when
-                    a "fortress" mode test is run. if not specified, defaults to
-                    'region1'.
-    -t, --tests     only run tests that match one of the comma separated list of
-                    patterns. if not specified, no tests are filtered.
-
-Modes:
-
-    none      the test can be run on any screen
-    title     the test must be run on the DF title screen. note that if the game
-              has a map loaded, "title" mode tests cannot be run
-    fortress  the test must be run while a map is loaded. if the game is
-              currently on the title screen, the save specified by the save_dir
-              parameter will be loaded.
-
-Examples:
-
-    test                runs all tests
-    test -r             runs all tests that haven't been run before
-    test -m none        runs tests that don't need the game to be in a
-                        specific mode
-    test -t quickfort   runs quickfort tests
-    test -d /path/to/dfhack-scripts/repo/test
-                        runs tests in your dev scripts repo
+Options
+-------
+
+-d, --test_dir  specifies which directory to look in for tests. defaults to
+                the "hack/scripts/test" folder in your DF installation.
+-m, --modes     only run tests in the given comma separated list of modes.
+                see the next section for a list of valid modes. if not
+                specified, the tests are not filtered by modes.
+-r, --resume    skip tests that have already been run. remove the
+                test_status.json file to reset the record.
+-s, --save_dir  the save folder to load for "fortress" mode tests. this
+                save is only loaded if a fort is not already loaded when
+                a "fortress" mode test is run. if not specified, defaults to
+                'region1'.
+-t, --tests     only run tests that match one of the comma separated list of
+                patterns. if not specified, no tests are filtered and all tests
+                are run.
+
+Modes
+-----
+
+none      the test can be run on any screen
+title     the test must be run on the DF title screen. note that if the game
+          has a map loaded, "title" mode tests cannot be run
+fortress  the test must be run while a map is loaded. if the game is
+          currently on the title screen, the save specified by the save_dir
+          parameter will be loaded.
+
+Examples
+--------
+
+test                runs all tests
+test -r             runs all tests that haven't been run before
+test -m none        runs tests that don't need the game to be in a
+                    specific mode
+test -t quickfort   runs quickfort tests
+test -d /path/to/dfhack-scripts/repo/test
+                    runs tests in your dev scripts repo

 Default values for the options may be set in a file named test_config.json in
 your DF folder. Options with comma-separated values should be written as json
@@ -352,33 +363,46 @@ local function load_tests(file, tests)
     if not code then
         dfhack.printerr('Failed to load file: ' .. tostring(err))
         return false
-    else
-        dfhack.internal.IN_TEST = true
-        local ok, err = dfhack.pcall(code)
-        dfhack.internal.IN_TEST = false
-        if not ok then
-            dfhack.printerr('Error when running file: ' .. tostring(err))
-            return false
-        else
-            if not MODES[env.config.mode] then
-                dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
-                return false
-            end
-            for name, test_func in pairs(env.test) do
-                if env.config.wrapper then
-                    local fn = test_func
-                    test_func = function() env.config.wrapper(fn) end
-                end
-                local test_data = {
-                    full_name = short_filename .. ':' .. name,
-                    func = test_func,
-                    private = env_private,
-                    config = env.config,
-                }
-                test_data.name = test_data.full_name:gsub('test/', ''):gsub('.lua', '')
-                table.insert(tests, test_data)
-            end
+    end
+    dfhack.internal.IN_TEST = true
+    local ok, err = dfhack.pcall(code)
+    dfhack.internal.IN_TEST = false
+    if not ok then
+        dfhack.printerr('Error when running file: ' .. tostring(err))
+        return false
+    end
+    if not MODES[env.config.mode] then
+        dfhack.printerr('Invalid config.mode: ' .. tostring(env.config.mode))
+        return false
+    end
+    if not env.config.targets then
+        dfhack.printerr('Skipping tests for unspecified target in ' .. file)
+        return true -- TODO: change to false once existing tests have targets specified
+    end
+    local targets = type(env.config.targets) == 'table' and env.config.targets or {env.config.targets}
+    for _,target in ipairs(targets) do
+        if target == 'core' then goto continue end
+        if type(target) ~= 'string' or not helpdb.is_entry(target) or
+            helpdb.get_entry_tags(target).unavailable
+        then
+            dfhack.printerr('Skipping tests for unavailable target: ' .. target)
+            return true
+        end
+        ::continue::
+    end
+    for name, test_func in pairs(env.test) do
+        if env.config.wrapper then
+            local fn = test_func
+            test_func = function() env.config.wrapper(fn) end
         end
+        local test_data = {
+            full_name = short_filename .. ':' .. name,
+            func = test_func,
+            private = env_private,
+            config = env.config,
+        }
+        test_data.name = test_data.full_name:gsub('test/', ''):gsub('.lua', '')
+        table.insert(tests, test_data)
     end
     return true
 end
@@ -575,7 +599,7 @@ local function dump_df_state()
         enabler = {
             fps = df.global.enabler.fps,
             gfps = df.global.enabler.gfps,
-            fullscreen = df.global.enabler.fullscreen,
+            fullscreen_state = df.global.enabler.fullscreen_state.whole,
         },
         gps = {
             dimx = df.global.gps.dimx,
diff --git a/plugins/CMakeLists.txt b/plugins/CMakeLists.txt
index 6759225d16..b3b69fb12c 100644
--- a/plugins/CMakeLists.txt
+++ b/plugins/CMakeLists.txt
@@ -169,7 +169,7 @@ if(BUILD_SUPPORTED)
     #dfhack_plugin(trackstop trackstop.cpp)
     #dfhack_plugin(tubefill tubefill.cpp)
     #add_subdirectory(tweak)
-    #dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
+    dfhack_plugin(workflow workflow.cpp LINK_LIBRARIES lua)
     dfhack_plugin(work-now work-now.cpp)
     dfhack_plugin(xlsxreader xlsxreader.cpp LINK_LIBRARIES lua xlsxio_read_STATIC zip expat)
     dfhack_plugin(zone zone.cpp LINK_LIBRARIES lua)
diff --git a/plugins/proto/isoworldremote.proto b/plugins/proto/isoworldremote.proto
index f53aa6aea9..ebe5ef545e 100644
--- a/plugins/proto/isoworldremote.proto
+++ b/plugins/proto/isoworldremote.proto
@@ -5,7 +5,7 @@ package isoworldremote;

 option optimize_for = LITE_RUNTIME;

-// Plugin: isoworldremote
+// DISABLED Plugin: isoworldremote

 enum BasicMaterial {
     AIR = 0;
@@ -54,7 +54,7 @@ message EmbarkTile {
     optional bool is_valid = 7;
 }

-// RPC GetEmbarkTile : TileRequest -> EmbarkTile
+// DISABLED RPC GetEmbarkTile : TileRequest -> EmbarkTile
 message TileRequest {
     optional int32 want_x = 1;
     optional int32 want_y = 2;
@@ -64,7 +64,7 @@ message MapRequest {
     optional string save_folder = 1;
 }

-// RPC GetEmbarkInfo : MapRequest -> MapReply
+// DISABLED RPC GetEmbarkInfo : MapRequest -> MapReply
 message MapReply {
     required bool available = 1;
     optional int32 region_x = 2;
@@ -75,7 +75,7 @@ message MapReply {
     optional int32 current_season = 7;
 }

-// RPC GetRawNames : MapRequest -> RawNames
+// DISABLED RPC GetRawNames : MapRequest -> RawNames
 message RawNames {
     required bool available = 1;
     repeated string inorganic = 2;
diff --git a/plugins/proto/rename.proto b/plugins/proto/rename.proto
index 4391ecc10f..ef3f574239 100644
--- a/plugins/proto/rename.proto
+++ b/plugins/proto/rename.proto
@@ -4,9 +4,9 @@ package dfproto;

 option optimize_for = LITE_RUNTIME;

-// Plugin: rename
+// DISABLED Plugin: rename

-// RPC RenameSquad : RenameSquadIn -> EmptyMessage
+// DISABLED RPC RenameSquad : RenameSquadIn -> EmptyMessage

 message RenameSquadIn {
     required int32 squad_id = 1;
@@ -14,7 +14,7 @@ message RenameSquadIn {
     optional string alias = 3;
 }

-// RPC RenameUnit : RenameUnitIn -> EmptyMessage
+// DISABLED RPC RenameUnit : RenameUnitIn -> EmptyMessage

 message RenameUnitIn {
     required int32 unit_id = 1;
@@ -22,7 +22,7 @@ message RenameUnitIn {
     optional string profession = 3;
 }

-// RPC RenameBuilding : RenameBuildingIn -> EmptyMessage
+// DISABLED RPC RenameBuilding : RenameBuildingIn -> EmptyMessage

 message RenameBuildingIn {
     required int32 building_id = 1;
diff --git a/test/core.lua b/test/core.lua
index ba104c90a9..0d21fa38fc 100644
--- a/test/core.lua
+++ b/test/core.lua
@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 local function clean_path(p)
     -- todo: replace with dfhack.filesystem call?
     return p:gsub('\\', '/'):gsub('//', '/'):gsub('/$', '')
diff --git a/test/encoding.lua b/test/encoding.lua
index cb0a72664c..afed4bf8b7 100644
--- a/test/encoding.lua
+++ b/test/encoding.lua
@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 function test.toSearchNormalized()
     expect.eq(dfhack.toSearchNormalized(''), '')
     expect.eq(dfhack.toSearchNormalized('abcd'), 'abcd')
diff --git a/test/test.lua b/test/test.lua
index 11f038d66a..679325d2e5 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -1,3 +1,5 @@
+config.targets = 'core'
+
 function test.internal_in_test()
     expect.true_(dfhack.internal.IN_TEST)
 end