diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 1ccca84..0000000 --- a/.coveragerc +++ /dev/null @@ -1,9 +0,0 @@ -[run] -source = clingexplaid - tests -omit = */clingexplaid/__main__.py - */clingexplaid/utils/cli.py -[report] -exclude_lines = - assert - nocoverage diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 31b8987..0000000 --- a/.flake8 +++ /dev/null @@ -1,6 +0,0 @@ -[flake8] -max-line-length = 120 -exclude = - .git - .github -ignore = E741 diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 5accb48..f8f3706 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -23,16 +23,21 @@ jobs: - name: "checkout repository" uses: actions/checkout@v3 - - name: "setup python 3.7" + - name: "setup python 3.9" uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.9 - name: "setup python 3.11" uses: actions/setup-python@v4 with: python-version: 3.11 - + + - name: "setup python 3.12" + uses: actions/setup-python@v4 + with: + python-version: 3.12 + - name: install nox run: python -m pip install nox diff --git a/.gitignore b/.gitignore index 6c794bd..0fd4d08 100644 --- a/.gitignore +++ b/.gitignore @@ -20,12 +20,12 @@ pip-delete-this-directory.txt # Unit test / coverage reports .pytest_cache/ +#venv +.venv + # pyenv .python-version -# venv -.venv - # mypy .mypy_cache/ .dmypy.json diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index f238bf7..0000000 --- a/.isort.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[settings] -profile = black diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b503400..d2c4c30 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,35 @@ repos: - repo: https://github.com/myint/autoflake - rev: v1.4 + rev: v2.3.0 hooks: - id: autoflake args: ["--in-place", "--imports=clingexplaid", "--ignore-init-module-imports", "--remove-unused-variables"] exclude: ^.github/ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.5.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace exclude: ^.github/ - repo: https://github.com/pycqa/isort - rev: 5.11.5 + rev: 5.13.2 hooks: - id: isort - args: ["--profile", "black"] exclude: ^.github/ - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 24.2.0 hooks: - id: black exclude: ^.github/ + + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.17 + hooks: + - id: mdformat + args: ["--wrap", "79"] + exclude: ^doc/ + additional_dependencies: + - mdformat-gfm diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 82e3aa7..0000000 --- a/.pylintrc +++ /dev/null @@ -1,31 +0,0 @@ -[FORMAT] - -max-line-length=120 - -[DESIGN] - -max-args=10 -max-attributes=7 -max-bool-expr=5 -max-branches=12 -max-locals=30 -max-parents=7 -max-public-methods=20 -max-returns=10 -max-statements=50 -min-public-methods=1 - -[SIMILARITIES] - -ignore-comments=yes -ignore-docstrings=yes -ignore-imports=yes -ignore-signatures=yes - -[BASIC] - -argument-rgx=^[a-z][a-z0-9]*((_[a-z0-9]+)*_?)?$ -variable-rgx=^[a-z][a-z0-9]*((_[a-z0-9]+)*_?)?$ - -# Good variable names which should always be accepted, separated by a comma. 
-good-names=_,M,N,B,A,Nn,Bn,An diff --git a/CHANGES.md b/CHANGES.md new file mode 100644 index 0000000..dcbbb26 --- /dev/null +++ b/CHANGES.md @@ -0,0 +1,5 @@ +# Changes + +## v0.1.0 + +- create project diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..d7e5503 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing + +Thanks for considering a contribution to clingexplaid. ❤️ + +## How to get help or discuss possible contributions + +To avoid duplicating issues, please search our [issue tracker][issues] and our +[mailing list][mailing_list] before filing a new issue. + +- Open an [issue][new_issue] describing your problem. +- [Subscribe] to our mailing list on SourceForge. + +## How to make a contribution + +- Fork the [clingexplaid][project_url] repository and create a branch for your + changes. +- Submit a pull request to the master branch with your changes. +- Respond to feedback on your pull request. +- If everything is fine, your pull request is merged. 🥳 + +## License + +When contributing to this project, you agree that you have authored 100% of the +content, that you have the necessary rights to the content and that the content +you contribute may be provided under the project license. + +[issues]: https://github.com/krr-up/clingo-explaid/issues/ +[mailing_list]: https://sourceforge.net/p/potassco/mailman/potassco-users/ +[new_issue]: https://github.com/krr-up/clingo-explaid/issues/new/ +[project_url]: https://github.com/krr-up/clingo-explaid +[subscribe]: https://sourceforge.net/projects/potassco/lists/potassco-users/ diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md new file mode 100644 index 0000000..0cbbd77 --- /dev/null +++ b/DEPLOYMENT.md @@ -0,0 +1,15 @@ +# Deployment + +Releases are deployed on [pypi] whenever a tag of the form `vMajor.Minor.Revision` +is pushed. Furthermore, the deployment workflow can be triggered manually to +deploy test releases on [test.pypi]. + +For this to work, the workflow has to be granted permission to deploy on the +two services. Please follow this packaging [guide] to set up your accounts +accordingly. We also recommend setting up a GitHub [environment] to restrict +which contributors can deploy packages. + +[environment]: https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment/ +[guide]: https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ +[pypi]: https://pypi.org/ +[test.pypi]: https://test.pypi.org/ diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md new file mode 100644 index 0000000..25fd335 --- /dev/null +++ b/DEVELOPMENT.md @@ -0,0 +1,41 @@ +# Development + +To improve code quality, we use [nox] to run linters, type checkers, unit +tests, documentation and more. We recommend installing nox using [pipx] to have +it available globally. + +```bash +# install +python -m pip install pipx +python -m pipx install nox + +# run all sessions +nox + +# list all sessions +nox -l + +# run individual session +nox -s session_name + +# run individual session (reuse install) +nox -Rs session_name +``` + +Note that the nox sessions create [editable] installs. In case there are +issues, try recreating environments by dropping the `-R` option. If your +project is incompatible with editable installs, adjust the `noxfile.py` to +disable them. + +We also provide a [pre-commit][pre] config to autoformat code upon commits.
It +can be set up using the following commands: + +```bash +python -m pipx install pre-commit +pre-commit install +``` + +[editable]: https://setuptools.pypa.io/en/latest/userguide/development_mode.html +[nox]: https://nox.thea.codes/en/stable/index.html +[pipx]: https://pypa.github.io/pipx/ +[pre]: https://pre-commit.com/ diff --git a/LICENSE b/LICENSE index 7623932..6380a11 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2021 Susana Hahn +Copyright (c) 2024 Hannes Weichelt Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 463d734..873bb1b 100644 --- a/README.md +++ b/README.md @@ -1,35 +1,52 @@ # clingexplaid -This project template is configured to ease collaboration. Linters, formatters, -and actions are already configured and ready to use. - -To use the project template, run the `init.py` script to give the project a -name and some metadata. The script can then be removed afterward and the -`setup.cfg` file adjusted. - -## Installation - -```shell -pip install clingexplaid -``` - ## Usage -```shell +Run the following for basic usage information: + +```bash clingexplaid -h ``` -Compute Minimal Unsatisfiable Core from unsatisfiable program: +The clingexplaid CLI (based on the `clingo.Application` class) can be called using this generic command: -```shell -clingexplaid --assumption-signature signature/arity +```bash +clingexplaid <files> <number-of-models> <method> <options> ``` -+ `--assumption-signature` is optional to allow for only specific facts to be transformed to assumptions - + if no such option is given all facts are transformed to assumptions regardless of their signature ++ `<files>`: has to be replaced by a list of all files or a single filename ++ `<number-of-models>`: defines how many models are computed (Default=`1`, All=`0`) ++ `<method>`: specifies which Clingexplaid method is used (Required) + + Options: + + `--muc`: + + Computes the Minimal Unsatisfiable Cores (MUCs) of the provided unsatisfiable program + + `--unsat-constraints`: + + Computes the Unsatisfiable Constraints of the provided unsatisfiable program + + `--show-decisions`: + + Visualizes the decision process of clasp ++ `<options>`: Additional options for the different methods + + For `--muc`: + + `-a`, `--assumption-signature`: limits which facts of the current program are converted to choices/assumptions for + finding the MUCs (Default: all facts are converted) + + For `--show-decisions`: + + `--decision-signature`: limits which decisions are shown in the visualization (Default: all atoms' decisions are + shown) + +### Examples + ++ A selection of examples can be found [here](examples) ## Development +### Installation + +To install the project, run + +```bash +pip install . +``` + + To improve code quality, we run linters, type checkers, and unit tests. The tools can be run using [nox]. We recommend installing nox using [pipx] to have it available globally: @@ -85,6 +102,79 @@ pre-commit install This blackens the source code whenever `git commit` is used.
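### Using the Python API

The MUC pipeline behind `--muc` can also be driven from Python. The snippet below is only a sketch of what the CLI does internally: it assumes the package layout used by the CLI code (`clingexplaid.muc.CoreComputer` and `clingexplaid.transformers.AssumptionTransformer`) and reuses the 4x4 sudoku files from `examples/sudoku`; treat it as an illustration rather than a stable API.

```python
import clingo

from clingexplaid.muc import CoreComputer
from clingexplaid.transformers import AssumptionTransformer

files = ["examples/sudoku/encoding.lp", "examples/sudoku/instance.lp"]

# Convert all facts matching initial/3 into assumptions (same effect as `-a initial/3`).
at = AssumptionTransformer(signatures={("initial", 3)})
program = at.parse_files(files)

ctl = clingo.Control()
ctl.add("base", [], program)
ctl.ground([("base", [])])

assumptions = at.get_assumptions(ctl)
cc = CoreComputer(ctl, assumptions)

# `shrink` is registered as the on_core callback and stores the resulting MUC in `cc.minimal`.
ctl.solve(assumptions=list(assumptions), on_core=cc.shrink)

if cc.minimal is None:
    print("SATISFIABLE: instance has no MUC")
elif not cc.minimal:
    print("NO MUC CONTAINED: unsatisfiability is not induced by the assumptions")
else:
    print("MUC:", " ".join(cc.muc_to_string(cc.minimal)))
```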
+## Problems and Limitations + +### Meta-encoding based approach (ASP-Approach) + +**Important Notes:** + ++ The Meta-encoding approach as it stands is not fully functional + +**Problem:** + + In the meta encoding all facts (or a selection matching a certain signature) are + transformed into assumptions which are then used as the assumption set for finding + the MUC + During the MUC search when subsets of this assumption set are fixed for satisfiability + checking it is important that even though they are not fixed, the other assumptions + are not assumed as false but as undefined + This is currently not possible with the meta-encoding, since assumptions are chosen + through a choice rule and all assumptions that aren't selected are defaulted to false + This doesn't allow for properly checking if such subsets entail unsatisfiability and + thus prevents us from finding the proper MUCs + +### Specifying Assumption Set using only Signatures + +**Important Notes:** + ++ clingo-explaid provides the `--muc` mode which gives you Minimal Unsatisfiable Cores for a given set of assumption + signatures that can be defined with `-a` ++ These signatures, however, do not always allow for finding the best-fitting MUC for a given encoding, compared + to an assumption set generated by hand + +**Problem:** + ++ Imagine this [example encoding](examples/misc/bad_mucs.lp): + +```prolog +a(1..3). +:- a(X). + +unsat. +:- unsat. +``` + ++ So when I execute `clingexplaid examples/misc/bad_mucs.lp --muc 0` I get the MUCs: + +``` +MUC 1 +a(3) +MUC 2 +a(2) +MUC 3 +a(1) +MUC 4 +unsat +``` + ++ So you would generally expect that executing `clingexplaid examples/misc/bad_mucs.lp --muc 0 -a a/1` would return the + first 3 found MUCs from before ++ But what actually happens is that there are no MUCs detected: + +``` +NO MUCS CONTAINED: The unsatisfiability of this program is not induced by the provided assumptions +UNSATISFIABLE +``` + ++ This is actually due to an implicit `(unsat, False)` in the first 3 MUCs that isn't printed ++ Since the standard mode of `--muc` converts all facts to choices when no `-a` is provided, `a(1)`, `a(2)`, `a(3)`, + and `unsat` are all converted to choices ++ We know that for the program to become satisfiable `unsat` cannot be true (line 4) ++ But since it is provided as a fact the choice rule conversion is necessary for the iterative deletion algorithm to + find any MUCs ++ The same holds, vice versa, for MUC 4: all `a/1` facts need to be converted to choice rules for that MUC to be + found + + [doc]: https://potassco.org/clingo/python-api/current/ [nox]: https://nox.thea.codes/en/stable/index.html [pipx]: https://pypa.github.io/pipx/ diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..9c8f594 --- /dev/null +++ b/TODO.md @@ -0,0 +1,59 @@ +## ToDos + +### Important Features + +`2023` + ++ [x] New CLI structure + + different modes: + + MUC + + UNSAT-CONSTRAINTS + + can be enabled through flags ++ [x] Iterative Deletion for Multiple MUCs + + variation of the QuickXplain algorithm: `SKIPPED` ++ [x] Finish unsat-constraints implementation for the API + +`2024 - JAN` + ++ [x] New option to enable verbose derivation output + + `--show-decisions` with more fine grained `--decision-signature` option ++ [x] Make `--show-decisions` its own mode ++ [x] Give a warning in Transformer if control is not grounded yet ++ [x] Documentation + + [x] Proper README + + [x] Docstrings for all API functions + + [x] CLI documentation with examples + + [x] Examples folder + + [x] Sudoku + + [x]
Graph Coloring + + [x] N-Queens ++ [x] Error when calling `--muc` constants aren't properly kept: + + The problem was in `AssumptionTransformer` where get_assumptions didn't have proper access to constants defined over + the CL and the program constants ++ [x] `AssumptionTransformer` doesn't work properly on included files + + It actually did work fine + +`2024 - FEB` + ++ [x] In `--show-decisions` hide INTERNAL when `--decision-signature` is active ++ [x] cleanup `DecisionOrderPropagator` print functions ++ [x] Features for `--unsat-constraints` + + [x] File + Line (Clickable link) ++ [x] Confusing Optimization prints during `--muc` when finding mucs in optimized Programs ++ [x] File-Link test with space in filename + + with `urllib.parsequote` ++ [x] Write up why negated assumptions in MUC's are a problem + + One which is currently not addressed by clingo-explaid ++ [x] Remove minimization also from `--unsat-constaints` mode ++ [x] Change file identification to use `clingo.ast.Location` instead of the subtring search and own built file tree ++ [x] Add spaces around Link to make it clickable on MAC + +`2024 - MAR` + ++ [x] Add way for `-a` to allow for signatures without variables (`test/0`) + +### Extra Features ++ [ ] `--unsat-constraints`: + + [ ] Access comments in the same line as constraint + + [ ] Currently, for multiline constraints a line number cannot be found ++ [ ] Timeout diff --git a/doc/_static/css/custom.css b/doc/_static/css/custom.css new file mode 100644 index 0000000..b5215ee --- /dev/null +++ b/doc/_static/css/custom.css @@ -0,0 +1,33 @@ +div.admonition.example { + border-color: hsl(257, 20%, 50%); + background-color: hsl(257, 20%, 80%); +} + +div.admonition.example > .admonition-title { + color: white; + background-color: hsl(257, 20%, 50%); +} + +div.admonition.example > .admonition-title::before { + content: "\f2a7"; +} + +.sidebar-logo { + margin: inherit; +} +.sidebar-logo-container{ + max-width: 20%!important; + margin-top: 0.2rem; + margin-right: 0.2rem; + justify-content: flex-end; + display: flex; +} +.sidebar-brand-text { + text-align: center !important; + align-items: center; + display: inline-flex; +} + +.sidebar-brand { + flex-direction: row !important; +} diff --git a/doc/_static/logo-dark-mode.png b/doc/_static/logo-dark-mode.png new file mode 100644 index 0000000..81ed997 Binary files /dev/null and b/doc/_static/logo-dark-mode.png differ diff --git a/doc/_static/logo-light-mode.png b/doc/_static/logo-light-mode.png new file mode 100644 index 0000000..d2ac73d Binary files /dev/null and b/doc/_static/logo-light-mode.png differ diff --git a/doc/conf.py b/doc/conf.py index 4d4d9e0..80d51a4 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -9,7 +9,9 @@ import configparser import datetime import os +import sys +sys.path.insert(0, os.path.abspath(".")) # modules that autodock should mock # useful if some external dependencies are not satisfied at doc build time. autodoc_mock_imports = [] @@ -36,13 +38,18 @@ # the documentation. extensions = [ - "nbsphinx", - "sphinx_rtd_theme", + "furo.sphinxext", # Theme + "sphinx.ext.extlinks", + "sphinx.ext.mathjax", + "sphinx.ext.todo", + "sphinx.ext.viewcode", "sphinx.ext.autodoc", - "sphinx.ext.autosummary", "sphinx.ext.napoleon", "sphinx.ext.autosectionlabel", "sphinx.ext.intersphinx", + "sphinx.ext.autosummary", + "sphinx_copybutton", + "myst_parser", ] autosummary_generate = True @@ -63,32 +70,57 @@ # This pattern also affects html_static_path and html_extra_path. 
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +html_title = _meta["name"] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme = "furo" html_theme_options = { - "canonical_url": "", # 'analytics_id': 'UA-XXXXXXX-1', # Provided by Google in your dashboard - "logo_only": False, - "display_version": True, - "display_version": True, - "prev_next_buttons_location": "bottom", - "style_external_links": False, + "light_logo": "logo-light-mode.png", + "dark_logo": "logo-dark-mode.png", + "footer_icons": [ + { + "name": "GitHub", + "url": _meta["url"], + "html": """ + + + + """, + "class": "", + }, + ], # Toc options - "collapse_navigation": True, - "sticky_navigation": False, - "navigation_depth": 4, - "includehidden": True, - "titles_only": False, } +# -- Options for Markdown files ---------------------------------------------- +# + +myst_enable_extensions = [ + "colon_fence", + "deflist", +] +myst_heading_anchors = 3 + +# -- Custom css +html_css_files = [ + "css/custom.css", +] + +html_static_path = ["_static"] + +add_module_names = False # Class names without full module path + # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] + +copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_prompt_is_regexp = True diff --git a/doc/content/encodings/index.md b/doc/content/encodings/index.md new file mode 100644 index 0000000..2b8e2f8 --- /dev/null +++ b/doc/content/encodings/index.md @@ -0,0 +1,7 @@ +# ASP Encodings + +Descriptions of ASP encodings. + +```{toctree} +instance.md +``` diff --git a/doc/content/encodings/instance.md b/doc/content/encodings/instance.md new file mode 100644 index 0000000..f325dd4 --- /dev/null +++ b/doc/content/encodings/instance.md @@ -0,0 +1,32 @@ +# Instance + +## Constants +```{list-table} +:header-rows: 1 +:widths: 25 100 + +* - Name + - Description +* - `horizon` + - The description of a constant. +``` + +## Predicates + +### `predicate(X,Y)` + +Description + +```{admonition} Example +```prolog +predicate(1,2). +``` + +### `another_predicate(X)` + +Description + +```{admonition} Example +```prolog +another_predicate(1). +``` diff --git a/doc/content/installation.md b/doc/content/installation.md new file mode 100644 index 0000000..39da015 --- /dev/null +++ b/doc/content/installation.md @@ -0,0 +1,41 @@ +# Installation + +clingexplaid requires Python 3.8+. We recommend version 3.10. + +You can check a successful installation by running + +```console +$ clingexplaid -h +``` + +## Installing with pip + + +The python clingexplaid package can be found [here](https://github.com/krr-up/clingo-explaid). + +```console +$ pip install clingexplaid +``` + +## Development + +### Installing from source + +The project is hosted on [github](https://github.com/krr-up/clingo-explaid) and can +also be installed from source. + +```{warning} +We recommend this only for development purposes. +``` + +```{note} +The `setuptools` package is required to run the commands below. 
+``` + +Execute the following command in the top level clingexplaid directory: + +```console +$ git clone https://github.com/krr-up/clingo-explaid +$ cd clingexplaid +$ pip install -e .[all] +``` diff --git a/doc/content/quickstart.md b/doc/content/quickstart.md new file mode 100644 index 0000000..26bd656 --- /dev/null +++ b/doc/content/quickstart.md @@ -0,0 +1,11 @@ +# Quick start + +A simple explanation on how to use the system. + +```console +$ clingexplaid -h +``` + +```{tip} +Tips on how to use the system. +``` diff --git a/doc/index.md b/doc/index.md new file mode 100644 index 0000000..5971dba --- /dev/null +++ b/doc/index.md @@ -0,0 +1,9 @@ +# clingexplaid + +An example project template. + +```{toctree} +content/installation.md +content/quickstart.md +content/encodings/index.md +``` diff --git a/doc/index.rst b/doc/index.rst deleted file mode 100644 index ab7a307..0000000 --- a/doc/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -The clingexplaid project -==================== - -An example project template. - -.. autosummary:: - :toctree: _autosummary - :recursive: - - clingexplaid diff --git a/examples/graph_coloring/README.md b/examples/graph_coloring/README.md new file mode 100644 index 0000000..ac07418 --- /dev/null +++ b/examples/graph_coloring/README.md @@ -0,0 +1,59 @@ +# Example : Graph Coloring + ++ This example encodes the classic graph coloring problem together with an unsatisfiable instance ++ Here for the sake of illustration some of the graph nodes are already assigned a color from the start, which is where the conflict occurs ++ Using `clingexplaid` we can discover the underlying Minimal Unsatisfiable Cores (MUCs) and their respective unsatisfiable constraints + + +## Visualization + +![](graph_coloring_example.svg) + +## Run + ++ Finding all MUCs + + ```bash + clingexplaid 0 encoding.lp instance.lp --muc -a assign/2 + ``` + + Expected Output: + + ```bash + MUC 1 + assign(1,green) assign(5,red) assign(7,green) + MUC 2 + assign(1,green) assign(2,blue) assign(5,red) + ``` + ++ Finding the unsatisfiable constraints + + ```bash + clingexplaid 0 encoding.lp instance.lp --unsat-constraints + ``` + + Expected Output: + + ```bash + Unsat Constraints + :- edge(N1,N2); assign(N1,C); assign(N2,C). + ``` + ++ Combined call with unsatisfiable constraints for every found MUC + ```bash + clingexplaid 0 encoding.lp instance.lp --muc --unsat-constraints -a assign/2 + ``` + + Expected Output: + + ```bash + MUC 1 + assign(1,green) assign(5,red) assign(7,green) + ├── Unsat Constraints + ├──:- edge(N1,N2); assign(N1,C); assign(N2,C). + MUC 2 + assign(1,green) assign(2,blue) assign(5,red) + ├── Unsat Constraints + ├──:- edge(N1,N2); assign(N1,C); assign(N2,C). + ``` + diff --git a/examples/graph_coloring/encoding.lp b/examples/graph_coloring/encoding.lp new file mode 100644 index 0000000..d8f947a --- /dev/null +++ b/examples/graph_coloring/encoding.lp @@ -0,0 +1,5 @@ +{assign(N, C): color(C)}=1 :- node(N). + +:- edge(N1, N2), assign(N1, C), assign(N2, C). + +#show assign/2. \ No newline at end of file diff --git a/examples/graph_coloring/graph_coloring_example.svg b/examples/graph_coloring/graph_coloring_example.svg new file mode 100644 index 0000000..23f9c81 --- /dev/null +++ b/examples/graph_coloring/graph_coloring_example.svg @@ -0,0 +1,4 @@ + + + +
[graph_coloring_example.svg: diagram text omitted — node labels 1-9 shown in three panels titled "Original", "MUC 1", and "MUC 2"]
\ No newline at end of file diff --git a/examples/graph_coloring/instance.lp b/examples/graph_coloring/instance.lp new file mode 100644 index 0000000..35b5e8f --- /dev/null +++ b/examples/graph_coloring/instance.lp @@ -0,0 +1,20 @@ +node(1..9). + +edge(1,2). edge(1,3). edge(1,4). +edge(2,1). edge(2,3). +edge(3,1). edge(3,2). edge(3,5). +edge(4,1). edge(4,5). edge(4,8). +edge(5,3). edge(5,4). edge(5,6). edge(5,8). +edge(6,5). edge(6,9). +edge(7,8). +edge(8,7). edge(8,4). edge(8,5). edge(8,9). +edge(9,6). edge(9,8). + +color(red; green; blue). + +% pre assignments + +assign(1,green). +assign(2,blue). +assign(5,red). +assign(7,green). \ No newline at end of file diff --git a/examples/misc/bad_mucs.lp b/examples/misc/bad_mucs.lp new file mode 100644 index 0000000..b371215 --- /dev/null +++ b/examples/misc/bad_mucs.lp @@ -0,0 +1,5 @@ +a(1..3). +:- a(X). + +unsat. +:- unsat. \ No newline at end of file diff --git a/examples/misc/sat.lp b/examples/misc/sat.lp new file mode 100644 index 0000000..a704413 --- /dev/null +++ b/examples/misc/sat.lp @@ -0,0 +1,17 @@ +number(1..4). + +solution(X,Y,V) :- initial(X,Y,V). +{solution(X,Y,N): number(N)}=1 :- number(X) ,number(Y). +cage(X1,Y1,X2,Y2) :- solution(X1,Y1,_), solution(X2,Y2,_), + ((X1-1)/2)==((X2-1)/2), + ((Y1-1)/2)==((Y2-1)/2). + +:- solution(X,Y1,N), solution(X,Y2,N), Y1 != Y2. +:- solution(X1,Y,N), solution(X2,Y,N), X1 != X2. +:- cage(X1,Y1,X2,Y2), solution(X1,Y1,N), solution(X2,Y2,N), X1!=X2, Y1!=Y2. + +initial(1,1,1). +initial(2,2,2). +initial(3,3,3). + +#show solution/3. diff --git a/examples/misc/simple.lp b/examples/misc/simple.lp new file mode 100644 index 0000000..bc2baa3 --- /dev/null +++ b/examples/misc/simple.lp @@ -0,0 +1,4 @@ +a(1..5). +b(5..10). + +:- a(X), b(X). \ No newline at end of file diff --git a/examples/misc/soup.lp b/examples/misc/soup.lp new file mode 100644 index 0000000..5a14062 --- /dev/null +++ b/examples/misc/soup.lp @@ -0,0 +1,3 @@ +#include "test/blob.lp". + +soup(10). \ No newline at end of file diff --git a/examples/sudoku_4x4.lp b/examples/misc/sudoku_4x4.lp similarity index 81% rename from examples/sudoku_4x4.lp rename to examples/misc/sudoku_4x4.lp index 6dfbe05..6b7e0b2 100644 --- a/examples/sudoku_4x4.lp +++ b/examples/misc/sudoku_4x4.lp @@ -1,8 +1,8 @@ % SUDOKU EXAMPLE (4x4) - -#include "sudoku_encoding.lp". #const subgrid_size=2. +#include "sudoku_encoding_2.lp". + initial(1,1,4). initial(4,1,4). initial(1,4,2). diff --git a/examples/sudoku_encoding.lp b/examples/misc/sudoku_encoding.lp similarity index 94% rename from examples/sudoku_encoding.lp rename to examples/misc/sudoku_encoding.lp index bbc4630..3c927ca 100644 --- a/examples/sudoku_encoding.lp +++ b/examples/misc/sudoku_encoding.lp @@ -4,7 +4,7 @@ % #const subgrid_size=2/3. -number(1..subgrid_size**2). +number(1..2**2). solution(X,Y,V) :- initial(X,Y,V). {solution(X,Y,N): number(N)}=1 :- number(X) ,number(Y). diff --git a/examples/misc/sudoku_encoding_2.lp b/examples/misc/sudoku_encoding_2.lp new file mode 100644 index 0000000..22e5851 --- /dev/null +++ b/examples/misc/sudoku_encoding_2.lp @@ -0,0 +1,15 @@ +% ########### SUDOKU SOLVER ########### + +% GENERATING + +% #const subgrid_size=2/3. + +solution(X,Y,V) :- initial(X,Y,V). +{solution(X,Y,N): N=1..4}=1 :- X=1..4, Y=1..4. +cage(X1,Y1,X2,Y2):- solution(X1,Y1,_), solution(X2,Y2,_), ((X1-1)/subgrid_size)==((X2-1)/subgrid_size), ((Y1-1)/subgrid_size)==((Y2-1)/subgrid_size). + +:- solution(X,Y1,N), solution(X,Y2,N), Y1 != Y2. +:- solution(X1,Y,N), solution(X2,Y,N), X1 != X2. 
+:- cage(X1,Y1,X2,Y2), solution(X1,Y1,N), solution(X2,Y2,N), X1!=X2, Y1!=Y2. + +#show solution/3. diff --git a/examples/misc/test.lp b/examples/misc/test.lp new file mode 100644 index 0000000..2fb89da --- /dev/null +++ b/examples/misc/test.lp @@ -0,0 +1,3 @@ +a. b. c. + +{a; b}=1. diff --git a/examples/misc/test/blob.lp b/examples/misc/test/blob.lp new file mode 100644 index 0000000..f9461b3 --- /dev/null +++ b/examples/misc/test/blob.lp @@ -0,0 +1,3 @@ +#include "../test_inert.lp". + +blob(123). \ No newline at end of file diff --git a/examples/misc/test2.lp b/examples/misc/test2.lp new file mode 100644 index 0000000..04ae796 --- /dev/null +++ b/examples/misc/test2.lp @@ -0,0 +1,8 @@ +{a}. {b}. {c}. + +% {a; b}. + +unsat :- a, b. +unsat :- not a, not b. + +:- not unsat. diff --git a/examples/misc/test3.lp b/examples/misc/test3.lp new file mode 100644 index 0000000..88739da --- /dev/null +++ b/examples/misc/test3.lp @@ -0,0 +1,4 @@ +a. b. c. + +:- a, b. +:- not a, not b. diff --git a/examples/misc/test4.lp b/examples/misc/test4.lp new file mode 100644 index 0000000..2dccda4 --- /dev/null +++ b/examples/misc/test4.lp @@ -0,0 +1,13 @@ +{a}. {b}. {c}. + +% {a; b}. + +unsat :- a, b. +unsat :- not a, not b. + +:- not unsat. +:- not a, not b, not c. + +#heuristic a.[1,false] +#heuristic b.[1,false] +#heuristic c.[1,false] diff --git a/examples/misc/test_inert.lp b/examples/misc/test_inert.lp new file mode 100644 index 0000000..d179321 --- /dev/null +++ b/examples/misc/test_inert.lp @@ -0,0 +1 @@ +blub(42). \ No newline at end of file diff --git a/examples/misc/x.lp b/examples/misc/x.lp new file mode 100644 index 0000000..5378bb7 --- /dev/null +++ b/examples/misc/x.lp @@ -0,0 +1,32 @@ +#program base. +#const subgrid_size = 2. +solution(X,Y,V) :- initial(X,Y,V). +1 = { solution(X,Y,N): N = (1..4) } :- X = (1..4); Y = (1..4). +cage(X1,Y1,X2,Y2) :- solution(X1,Y1,_); solution(X2,Y2,_); ((X1-1)/subgrid_size) = ((X2-1)/subgrid_size); ((Y1-1)/subgrid_size) = ((Y2-1)/subgrid_size). +#false :- solution(X,Y1,N); solution(X,Y2,N); Y1 != Y2. +#false :- solution(X1,Y,N); solution(X2,Y,N); X1 != X2. +#false :- cage(X1,Y1,X2,Y2); solution(X1,Y1,N); solution(X2,Y2,N); X1 != X2; Y1 != Y2. +#show solution/3. +#program base. +%{ initial(1,1,4) }. +%{ initial(4,1,4) }. +%{ initial(1,4,2) }. +%{ initial(2,3,3) }. +%{ initial(3,3,1) }. +%{ initial(3,4,3) }. +#show _muc/1. +#show _assumption/1. +_assumption(initial(1,1,4)). +_muc(initial(1,1,4)) :- initial(1,1,4). +_assumption(initial(4,1,4)). +_muc(initial(4,1,4)) :- initial(4,1,4). +_assumption(initial(1,4,2)). +_muc(initial(1,4,2)) :- initial(1,4,2). +_assumption(initial(2,3,3)). +_muc(initial(2,3,3)) :- initial(2,3,3). +_assumption(initial(3,3,1)). +_muc(initial(3,3,1)) :- initial(3,3,1). +_assumption(initial(3,4,3)). +_muc(initial(3,4,3)) :- initial(3,4,3). +#show initial/3. + diff --git a/examples/misc/zero_arity_assumptions.lp b/examples/misc/zero_arity_assumptions.lp new file mode 100644 index 0000000..e009fe7 --- /dev/null +++ b/examples/misc/zero_arity_assumptions.lp @@ -0,0 +1,5 @@ +test. + +not_test :- not test. + +:- not not_test. 
\ No newline at end of file diff --git a/examples/queens/README.md b/examples/queens/README.md new file mode 100644 index 0000000..b162c44 --- /dev/null +++ b/examples/queens/README.md @@ -0,0 +1,54 @@ +# Example : N Queens Problem + ++ This example encodes the classic n queens problem ++ The correct problem encoding is provided with an unsatisfiable instance ++ Using `clingexplaid` we can discover the underlying Minimal Unsatisfiable Cores (MUCs) and their respective unsatisfiable constraints + + +## Visualization + +![](queens_example.svg) + +## Run + ++ Finding all MUCs + + ```bash + clingexplaid 0 encoding.lp instance.lp --muc -a queen/2 + ``` + + Expected Output: + + ```bash + MUC 1 + queen(1,1) queen(2,5) + ``` + ++ Finding the unsatisfiable constraints + + ```bash + clingexplaid 0 encoding.lp instance.lp --unsat-constraints + ``` + + Expected Output: + + ```bash + Unsat Constraints + :- 2 <= { queen((D-J),J) }; D = (2..(2*n)). + :- 2 <= { queen((D+J),J) }; D = ((1-n)..(n-1)). + ``` + ++ Combined call with unsatisfiable constraints for every found MUC + ```bash + clingexplaid 0 encoding.lp instance.lp --muc --unsat-constraints -a queen/2 + ``` + + Expected Output: + + ```bash + MUC 1 + queen(1,1) queen(2,5) + ├── Unsat Constraints + ├──:- 2 <= { queen((D-J),J) }; D = (2..(2*n)). + ``` + diff --git a/examples/queens/encoding.lp b/examples/queens/encoding.lp new file mode 100644 index 0000000..902106b --- /dev/null +++ b/examples/queens/encoding.lp @@ -0,0 +1,9 @@ +#const n=5. +number(1..n). +cell(X,Y) :- number(X), number(Y). + +1 { queen(X,Y): number(Y) } 1 :- number(X). +1 { queen(X,Y): number(X) } 1 :- number(Y). + +:- 2 { queen(D-J,J) }, D = 2..2*n. +:- 2 { queen(D+J,J) }, D = 1-n..n-1. \ No newline at end of file diff --git a/examples/queens/instance.lp b/examples/queens/instance.lp new file mode 100644 index 0000000..e8c874e --- /dev/null +++ b/examples/queens/instance.lp @@ -0,0 +1,3 @@ +queen(1,1). +queen(2,5). +queen(3,2). \ No newline at end of file diff --git a/examples/queens/queens_example.svg b/examples/queens/queens_example.svg new file mode 100644 index 0000000..b98e149 --- /dev/null +++ b/examples/queens/queens_example.svg @@ -0,0 +1,4 @@ + + + +
[queens_example.svg: diagram text omitted — two panels titled "Original" and "MUC 1"]
\ No newline at end of file diff --git a/examples/sudoku/README.md b/examples/sudoku/README.md new file mode 100644 index 0000000..ddd6cf7 --- /dev/null +++ b/examples/sudoku/README.md @@ -0,0 +1,60 @@ +# Example : Sudoku + ++ This example is a 4 by 4 sudoku encoding which is given an unsatisfiable instance ++ Using `clingexplaid` we can discover the underlying Minimal Unsatisfiable Cores (MUCs) and their respective unsatisfiable constraints + +## Visualization + +![Sudoku 4x4 example with its associated MUCs](sudoku_example.svg) + +## Run + ++ Finding all MUCs + + ```bash + clingexplaid 0 encoding.lp instance.lp --muc -a initial/3 + ``` + Expected Output: + + ```bash + MUC 1 + initial(4,1,4) initial(3,3,1) initial(3,4,3) + MUC 2 + initial(1,1,4) initial(1,4,2) initial(2,3,3) initial(3,3,1) + MUC 3 + initial(1,1,4) initial(4,1,4) + ``` + ++ Finding the unsatisfiable constraints + + ```bash + clingexplaid 0 encoding.lp instance.lp --unsat-constraints + ``` + Expected Output: + + ```bash + Unsat Constraints + :- solution(X1,Y,N); solution(X2,Y,N); X1 != X2. + ``` + ++ Combined call with unsatisfiable constraints for every found MUC + ```bash + clingexplaid 0 encoding.lp instance.lp --muc --unsat-constraints -a initial/3 + ``` + + Expected Output: + + ```bash + MUC 1 + initial(4,1,4) initial(3,3,1) initial(3,4,3) + ├── Unsat Constraints + ├──:- solution(X,Y1,N); solution(X,Y2,N); Y1 != Y2. + MUC 2 + initial(1,1,4) initial(1,4,2) initial(2,3,3) initial(3,3,1) + ├── Unsat Constraints + ├──:- solution(X,Y1,N); solution(X,Y2,N); Y1 != Y2. + MUC 3 + initial(1,1,4) initial(4,1,4) + ├── Unsat Constraints + ├──:- solution(X1,Y,N); solution(X2,Y,N); X1 != X2. + ``` \ No newline at end of file diff --git a/examples/sudoku/encoding.lp b/examples/sudoku/encoding.lp new file mode 100644 index 0000000..4a17289 --- /dev/null +++ b/examples/sudoku/encoding.lp @@ -0,0 +1,13 @@ +number(1..4). + +solution(X,Y,V) :- initial(X,Y,V). +{solution(X,Y,N): number(N)}=1 :- number(X) ,number(Y). +cage(X1,Y1,X2,Y2) :- solution(X1,Y1,_), solution(X2,Y2,_), + ((X1-1)/2)==((X2-1)/2), + ((Y1-1)/2)==((Y2-1)/2). + +:- solution(X,Y1,N), solution(X,Y2,N), Y1 != Y2. +:- solution(X1,Y,N), solution(X2,Y,N), X1 != X2. +:- cage(X1,Y1,X2,Y2), solution(X1,Y1,N), solution(X2,Y2,N), X1!=X2, Y1!=Y2. + +#show solution/3. diff --git a/examples/sudoku/instance.lp b/examples/sudoku/instance.lp new file mode 100644 index 0000000..1ef5e02 --- /dev/null +++ b/examples/sudoku/instance.lp @@ -0,0 +1,6 @@ +initial(1,1,4). +initial(4,1,4). +initial(1,4,2). +initial(2,3,3). +initial(3,3,1). +initial(3,4,3). diff --git a/examples/sudoku/sudoku_example.svg b/examples/sudoku/sudoku_example.svg new file mode 100644 index 0000000..2414a8a --- /dev/null +++ b/examples/sudoku/sudoku_example.svg @@ -0,0 +1,4 @@ + + + +
[sudoku_example.svg: diagram text omitted — 4x4 grids showing the initial values in four panels titled "Original", "MUC 1", "MUC 2", and "MUC 3"]
\ No newline at end of file diff --git a/init.py b/init.py deleted file mode 100755 index be05cc6..0000000 --- a/init.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python3 -""" -Init script to rename project. -""" - -import os -import re - - -def read(prompt, regex): - """ - Read a string from command line. - - The string has to match the given regular expression. - """ - while True: - ret = input(prompt) - match = re.match(regex, ret) - if match is not None: - return ret - print(f"the project name has to match the regular expression: {regex}") - - -def main(): - """ - Rename the project. - """ - project = read("project name: ", r"^[a-z][a-z0-9_]*$") - author = read("author: ", r".+") - email = read("email: ", r".+") - - replacements = { - "fillname": project, - "": email, - "": author, - } - - def replace(filepath): - with open(filepath, "r", encoding="utf-8") as hnd: - content = hnd.read() - for key, val in replacements.items(): - content = content.replace(key, val) - with open(filepath, "w", encoding="utf-8") as hnd: - hnd.write(content) - - for rootpath in [os.path.join("src", "fillname"), "tests"]: - for dirpath, _, filenames in os.walk(rootpath): - for filename in filenames: - if not filename.endswith(".py"): - continue - filepath = os.path.join(dirpath, filename) - replace(filepath) - - for filepath in [ - "setup.cfg", - "noxfile.py", - "README.md", - "doc/index.rst", - ".pre-commit-config.yaml", - ".coveragerc", - ]: - replace(filepath) - - os.rename(os.path.join("src", "fillname"), os.path.join("src", project)) - - -if __name__ == "__main__": - main() diff --git a/noxfile.py b/noxfile.py index 386cbcc..c085be7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -2,87 +2,91 @@ import nox -nox.options.sessions = "lint_flake8", "lint_pylint", "typecheck", "test" +nox.options.sessions = "lint_pylint", "typecheck", "test" EDITABLE_TESTS = True PYTHON_VERSIONS = None if "GITHUB_ACTIONS" in os.environ: - PYTHON_VERSIONS = ["3.7", "3.11"] + PYTHON_VERSIONS = ["3.12", "3.11", "3.9"] EDITABLE_TESTS = False -@nox.session -def format(session): - session.install("-e", ".[format]") - check = "check" in session.posargs - - autoflake_args = [ - "--in-place", - "--imports=clingexplaid", - "--ignore-init-module-imports", - "--remove-unused-variables", - "-r", - "src", - "tests", - ] - if check: - autoflake_args.remove("--in-place") - session.run("autoflake", *autoflake_args) - - isort_args = ["--profile", "black", "src", "tests"] - if check: - isort_args.insert(0, "--check") - isort_args.insert(1, "--diff") - session.run("isort", *isort_args) - - black_args = ["src", "tests"] - if check: - black_args.insert(0, "--check") - black_args.insert(1, "--diff") - session.run("black", *black_args) - - @nox.session def doc(session): + """ + Build the documentation. 
+ + Accepts the following arguments: + - open: open documentation after build + - clean: clean up the build folder + - : build the given with the given + """ target = "html" options = [] + open_doc = "open" in session.posargs + clean = "clean" in session.posargs + + if open_doc: + session.posargs.remove("open") + if clean: + session.posargs.remove("clean") + if session.posargs: target = session.posargs[0] options = session.posargs[1:] session.install("-e", ".[doc]") session.cd("doc") + if clean: + session.run("rm", "-rf", "_build") session.run("sphinx-build", "-M", target, ".", "_build", *options) + if open_doc: + session.run("open", "_build/html/index.html") @nox.session -def lint_flake8(session): - session.install("-e", ".[lint_flake8]") - session.run("flake8", "src", "tests") +def dev(session): + """ + Create a development environment in editable mode. + + Activate it by running `source .nox/dev/bin/activate`. + """ + session.install("-e", ".[dev]") @nox.session def lint_pylint(session): + """ + Run pylint. + """ session.install("-e", ".[lint_pylint]") session.run("pylint", "clingexplaid", "tests") @nox.session def typecheck(session): + """ + Typecheck the code using mypy. + """ session.install("-e", ".[typecheck]") - session.run("mypy", "-p", "src.clingexplaid", "-p", "tests", "--namespace-packages", "--ignore-missing-imports") + session.run("mypy", "--strict", "-p", "clingexplaid", "-p", "tests") @nox.session(python=PYTHON_VERSIONS) def test(session): - args = ['.[test]'] - if EDITABLE_TESTS: - args.insert(0, '-e') - session.install(*args) - session.run("coverage", "run", "-m", "unittest", "discover", "-v") - session.run("coverage", "report", "-m", "--fail-under=100") + """ + Run the tests. + Accepts an additional arguments which are passed to the unittest module. + This can for example be used to selectively run test cases. + """ -@nox.session -def dev(session): - session.install("-e", ".[dev]") + args = [".[test]"] + if EDITABLE_TESTS: + args.insert(0, "-e") + session.install(*args) + if session.posargs: + session.run("coverage", "run", "-m", "unittest", session.posargs[0], "-v") + else: + session.run("coverage", "run", "-m", "unittest", "discover", "-v") + session.run("coverage", "report", "-m", "--fail-under=100") diff --git a/pyproject.toml b/pyproject.toml index d214b53..81e74e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,88 @@ requires = [ "setuptools-scm", ] build-backend = "setuptools.build_meta" + +[project] +name = "clingexplaid" +authors = [ + { name = "Hannes Weichelt", email = "hweichelt@uni-potsdam.de" } +] +description = "A template project." 
+requires-python = ">=3.9" +license = {file = "LICENSE"} +dynamic = [ "version" ] +readme = "README.md" +dependencies = [ + "clingo>=5.7.1", + "autoflake", +] + +[project.urls] +Homepage = "https://github.com/krr-up/clingo-explaid" + +[project.optional-dependencies] +format = [ "black", "isort", "autoflake" ] +lint_pylint = [ "pylint" ] +typecheck = [ "types-setuptools", "mypy" ] +test = [ "coverage[toml]" ] +doc = [ "sphinx", "furo", "nbsphinx", "sphinx_copybutton", "myst-parser" ] +dev = [ "clingexplaid[test,typecheck,lint_pylint]" ] + +[project.scripts] +clingexplaid = "clingexplaid.__main__:main" + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools_scm] +version_scheme = "python-simplified-semver" +local_scheme = "no-local-version" + +[tool.isort] +profile = "black" +line_length = 120 + +[tool.black] +line-length = 120 + +[tool.pylint.format] +max-line-length = 120 + +[tool.pylint.design] +max-args = 10 +max-attributes = 7 +max-bool-expr = 5 +max-branches = 12 +max-locals = 30 +max-parents = 7 +max-public-methods = 20 +max-returns = 10 +max-statements = 50 +min-public-methods = 1 + +[tool.pylint.similarities] +ignore-comments = true +ignore-docstrings = true +ignore-imports = true +ignore-signatures = true + +[tool.pylint.basic] +argument-rgx = "^[a-z][a-z0-9]*((_[a-z0-9]+)*_?)?$" +variable-rgx = "^[a-z][a-z0-9]*((_[a-z0-9]+)*_?)?$" +good-names = ["_"] + +[tool.coverage.run] +source = ["clingexplaid", "tests"] +omit = [ + "*/clingexplaid/__main__.py", + "*/clingexplaid/cli/*", + "*/clingexplaid/propagators/*", + "*/clingexplaid/transformers/__init__.py", + "*/clingexplaid/muc/__init__.py", + "*/clingexplaid/unsat_constraints/__init__.py", + "*/tests/*", + "*/constants.py" +] + +[tool.coverage.report] +exclude_lines = ["assert", "nocoverage"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 54f9cc3..0000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -cffi==1.15.1 -clingo==5.6.2 -pycparser==2.21 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index d703afc..0000000 --- a/setup.cfg +++ /dev/null @@ -1,50 +0,0 @@ -[metadata] -name = clingexplaid -version = 1.0.0 -author = Hannes Weichelt -author_email = weichelt.h@uni-potsdam.de -description = Tools for Explaination with clingo. -long_description = file: README.md -long_description_content_type = text/markdown -license = MIT -url = https://github.com/krr-up/clingo-explaid - -[options] -packages = find: -package_dir = - =src -include_package_data = True -install_requires = - importlib_metadata;python_version<'3.8' - clingo>=5.6.0 - autoflake - -[options.packages.find] -where = src - -[options.extras_require] -format = - black - isort - autoflake -lint_flake8 = - flake8 - flake8-black - flake8-isort -lint_pylint = - pylint -typecheck = - types-setuptools - mypy -test = - coverage -doc = - sphinx - sphinx_rtd_theme - nbsphinx -dev = - clingexplaid[test,typecheck,lint_pylint,lint_flake8] - -[options.entry_points] -console_scripts = - clingexplaid = clingexplaid.__main__:main diff --git a/setup.py b/setup.py deleted file mode 100644 index 9ab6a32..0000000 --- a/setup.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -This is provided for compatibility. -""" -from setuptools import setup - -setup() diff --git a/src/clingexplaid/__init__.py b/src/clingexplaid/__init__.py index e69de29..b0cbfd4 100644 --- a/src/clingexplaid/__init__.py +++ b/src/clingexplaid/__init__.py @@ -0,0 +1,3 @@ +""" +The clingexplaid project. 
+""" diff --git a/src/clingexplaid/__main__.py b/src/clingexplaid/__main__.py index 570f764..86244c8 100644 --- a/src/clingexplaid/__main__.py +++ b/src/clingexplaid/__main__.py @@ -6,15 +6,15 @@ from clingo.application import clingo_main -from clingexplaid.utils.cli import CoreComputerApp +from .cli.clingo_app import ClingoExplaidApp -def main(): +def main() -> None: """ - Main function calling the application class + Run the main function. """ - clingo_main(CoreComputerApp(sys.argv[0]), sys.argv[1:] + ["-V0"]) - sys.exit() + + clingo_main(ClingoExplaidApp(sys.argv[0]), sys.argv[1:] + ["-V0"]) if __name__ == "__main__": diff --git a/src/clingexplaid/cli/__init__.py b/src/clingexplaid/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/clingexplaid/cli/clingo_app.py b/src/clingexplaid/cli/clingo_app.py new file mode 100644 index 0000000..e6b6b83 --- /dev/null +++ b/src/clingexplaid/cli/clingo_app.py @@ -0,0 +1,356 @@ +""" +App Module: clingexplaid CLI clingo app +""" + +import re +import sys +from importlib.metadata import version +from pathlib import Path +from typing import Dict, List, Tuple, Optional, Set, Callable, Sequence +from warnings import warn + + +import clingo +from clingo.application import Application, Flag + +from ..muc import CoreComputer +from ..propagators import DecisionOrderPropagator +from ..unsat_constraints import UnsatConstraintComputer +from ..utils import ( + get_constants_from_arguments, +) +from ..utils.logging import BACKGROUND_COLORS, COLORS +from ..transformers import AssumptionTransformer, OptimizationRemover + + +HYPERLINK_MASK = "\033]8;{};{}\033\\{}\033]8;;\033\\" + + +class ClingoExplaidApp(Application): + """ + Application class for executing clingo-explaid functionality on the command line + """ + + # pylint: disable = too-many-instance-attributes + + CLINGEXPLAID_METHODS = { + "muc": "Description for MUC method", + "unsat-constraints": "Description for unsat-constraints method", + "show-decisions": "Visualize the decision process of clingo during solving", + } + + def __init__(self, name: str) -> None: + # pylint: disable = unused-argument + self.methods: Set[str] = set() + self.method_functions: Dict[str, Callable] = { # type: ignore + m: getattr(self, f'_method_{m.replace("-", "_")}') for m in self.CLINGEXPLAID_METHODS + } + self.method_flags: Dict[str, Flag] = {m: Flag() for m in self.CLINGEXPLAID_METHODS} + self.argument_constants: Dict[str, str] = {} + + # SHOW DECISIONS + self._show_decisions_decision_signatures: Dict[str, int] = {} + self._show_decisions_model_id: int = 1 + + # MUC + self._muc_assumption_signatures: Dict[str, int] = {} + self._muc_id: int = 1 + + def _initialize(self) -> None: + # add enabled methods to self.methods + for method, flag in self.method_flags.items(): + if flag.flag: + self.methods.add(method) + + if len(self.methods) == 0: + raise ValueError( + f"Clingexplaid was called without any method, please select at least one of the following methods: " + f"[{', '.join(['--' + str(m) for m in self.CLINGEXPLAID_METHODS])}]" + ) + + @staticmethod + def _parse_signature(signature_string: str) -> Tuple[str, int]: + match_result = re.match(r"^([a-zA-Z]+)/([0-9]+)$", signature_string) + if match_result is None: + raise ValueError("Wrong signature Format") + return match_result.group(1), int(match_result.group(2)) + + def _parse_assumption_signature(self, assumption_signature: str) -> bool: + if not self.method_flags["muc"]: + print("PARSE ERROR: The assumption signature option is only available if the 
flag --muc is enabled") + return False + assumption_signature_string = assumption_signature.replace("=", "").strip() + try: + signature, arity = self._parse_signature(assumption_signature_string) + except ValueError: + print( + "PARSE ERROR: Wrong signature format. The assumption signatures have to follow the format " + "/" + ) + return False + self._muc_assumption_signatures[signature] = arity + return True + + def _parse_decision_signature(self, decision_signature: str) -> bool: + if not self.method_flags["show-decisions"]: + print( + "PARSE ERROR: The decision signature option is only available if the flag --show-decisions is enabled" + ) + return False + decision_signature_string = decision_signature.replace("=", "").strip() + try: + signature, arity = self._parse_signature(decision_signature_string) + except ValueError: + print( + "PARSE ERROR: Wrong signature format. The decision signatures have to follow the format " + "/" + ) + return False + self._show_decisions_decision_signatures[signature] = arity + return True + + def register_options(self, options: clingo.ApplicationOptions) -> None: + group = "Clingo-Explaid Methods" + + for method, description in self.CLINGEXPLAID_METHODS.items(): + options.add_flag( + group=group, + option=method, + description=description, + target=self.method_flags[method], + ) + + group = "MUC Options" + + options.add( + group, + "assumption-signature,a", + "Facts matching with this signature will be converted to assumptions for finding a MUC " + "(default: all facts)", + self._parse_assumption_signature, + multi=True, + ) + + group = "Show Decisions Options" + + options.add( + group, + "decision-signature", + "When --show-decisions is enabled, show only decisions matching with this signature " + "(default: show all decisions)", + self._parse_decision_signature, + multi=True, + ) + + # group = "General Options" + + def _apply_assumption_transformer( + self, signatures: Dict[str, int], files: List[str] + ) -> Tuple[str, AssumptionTransformer]: + signature_set = set(self._muc_assumption_signatures.items()) if signatures else None + at = AssumptionTransformer(signatures=signature_set) + if not files: + program_transformed = at.parse_files("-") + else: + program_transformed = at.parse_files(files) + return program_transformed, at + + def _print_muc(self, muc_string: str) -> None: + print(f"{BACKGROUND_COLORS['BLUE']} MUC {BACKGROUND_COLORS['LIGHT_BLUE']} {self._muc_id} {COLORS['NORMAL']}") + print(f"{COLORS['BLUE']}{muc_string}{COLORS['NORMAL']}") + self._muc_id += 1 + + def _method_muc( + self, + control: clingo.Control, + files: List[str], + compute_unsat_constraints: bool = False, + ) -> None: + program_transformed, at = self._apply_assumption_transformer( + signatures=self._muc_assumption_signatures, files=files + ) + + # remove optimization statements + optr = OptimizationRemover() + program_transformed = optr.parse_string(program_transformed) + + control.add("base", [], program_transformed) + control.ground([("base", [])]) + + assumptions = at.get_assumptions(control, constants=self.argument_constants) + cc = CoreComputer(control, assumptions) + + max_models = int(control.configuration.solve.models) # type: ignore + print("Solving...") + + # Case: Finding a single MUC + if max_models == -1: + control.solve(assumptions=list(assumptions), on_core=cc.shrink) + + if cc.minimal is None: + print("SATISFIABLE: Instance has no MUCs") + return + if len(list(cc.minimal)) == 0: + print( + "NO MUCS CONTAINED: The unsatisfiability of this program is not induced by 
the provided assumptions" + ) + return + + muc_string = " ".join(cc.muc_to_string(cc.minimal)) + self._print_muc(muc_string) + + if compute_unsat_constraints: + self._method_unsat_constraints( + control=clingo.Control(), + files=files, + assumption_string=muc_string, + output_prefix_active=f"{COLORS['RED']}├──{COLORS['NORMAL']}", + output_prefix_passive=f"{COLORS['RED']}│ {COLORS['NORMAL']}", + ) + + # Case: Finding multiple MUCs + if max_models >= 0: + program_unsat = False + with control.solve(assumptions=list(assumptions), yield_=True) as solve_handle: + if not solve_handle.get().satisfiable: + program_unsat = True + + if program_unsat: + mucs = 0 + for muc in cc.get_multiple_minimal(max_mucs=max_models): + mucs += 1 + muc_string = " ".join(cc.muc_to_string(muc)) + self._print_muc(muc_string) + + if compute_unsat_constraints: + self._method_unsat_constraints( + control=clingo.Control(), + files=files, + assumption_string=muc_string, + output_prefix_active=f"{COLORS['RED']}├──{COLORS['NORMAL']}", + output_prefix_passive=f"{COLORS['RED']}│ {COLORS['NORMAL']}", + ) + if not mucs: + print( + "NO MUCS CONTAINED: The unsatisfiability of this program is not induced by the provided " + "assumptions" + ) + return + + def _print_unsat_constraints( + self, + unsat_constraints: Dict[int, str], + ucc: UnsatConstraintComputer, + prefix: Optional[str] = None, + ) -> None: + if prefix is None: + prefix = "" + print(f"{prefix}{BACKGROUND_COLORS['RED']} Unsat Constraints {COLORS['NORMAL']}") + for cid, constraint in unsat_constraints.items(): + location = ucc.get_constraint_location(cid) + if location is None: + warn(f"Couldn't find a corresponding file for constraint with id {cid}") + continue + relative_file_path = location.begin.filename + absolute_file_path = str(Path(relative_file_path).absolute().resolve()) + line_beginning = location.begin.line + line_end = location.end.line + line_string = ( + f"Line {line_beginning}" if line_beginning == line_end else f"Lines {line_beginning}-{line_end}" + ) + file_link = "file://" + absolute_file_path + if " " in absolute_file_path: + # If there's a space in the filename use a hyperlink + file_link = HYPERLINK_MASK.format("", file_link, file_link) + + if location is not None: + print( + f"{prefix}{COLORS['RED']}{constraint}" + f"{COLORS['GREY']} [ {file_link} ]({line_string}){COLORS['NORMAL']}" + ) + else: + print(f"{prefix}{COLORS['RED']}{constraint}{COLORS['NORMAL']}") + + def _method_unsat_constraints( + self, + control: clingo.Control, + files: List[str], + assumption_string: Optional[str] = None, + output_prefix_active: str = "", + output_prefix_passive: str = "", + ) -> None: + # register DecisionOrderPropagator if flag is enabled + if self.method_flags["show-decisions"]: + decision_signatures = set(self._show_decisions_decision_signatures.items()) + dop = DecisionOrderPropagator(signatures=decision_signatures, prefix=output_prefix_passive) + control.register_propagator(dop) # type: ignore + + ucc = UnsatConstraintComputer(control=control) + ucc.parse_files(files) + unsat_constraints = ucc.get_unsat_constraints(assumption_string=assumption_string) + self._print_unsat_constraints(unsat_constraints, ucc=ucc, prefix=output_prefix_active) + + def _print_model( + self, + model: clingo.Model, + prefix_active: str = "", + prefix_passive: str = "", + ) -> None: + print(prefix_passive) + print( + f"{prefix_active}" + f"{BACKGROUND_COLORS['LIGHT-GREY']}{COLORS['BLACK']} Model {COLORS['NORMAL']}{BACKGROUND_COLORS['GREY']} " + f"{self._show_decisions_model_id} 
{COLORS['NORMAL']} " + f"{model}" + ) + # print(f"{COLORS['BLUE']}{model}{COLORS['NORMAL']}") + print(prefix_passive) + self._show_decisions_model_id += 1 + + def _method_show_decisions( + self, + control: clingo.Control, + files: List[str], + ) -> None: + decision_signatures = set(self._show_decisions_decision_signatures.items()) + dop = DecisionOrderPropagator(signatures=decision_signatures) + control.register_propagator(dop) # type: ignore + for f in files: + control.load(f) + if not files: + control.load("-") + control.ground() + control.solve(on_model=lambda model: self._print_model(model, "├", "│")) + + def print_model(self, model: clingo.Model, _) -> None: # type: ignore + return + + def main(self, control: clingo.Control, files: Sequence[str]) -> None: + print("clingexplaid", "version", version("clingexplaid")) + self._initialize() + + # printing the input files + if not files: + print("Reading from -") + else: + print(f"Reading from {files[0]} {'...' if len(files) > 1 else ''}") + + self.argument_constants = get_constants_from_arguments(sys.argv) + + # standard case: only one method + if len(self.methods) == 1: + method = list(self.methods)[0] + method_function = self.method_functions[method] + method_function(control, files) + # special cases where specific pipelines have to be configured + elif self.methods == {"muc", "unsat-constraints"}: + self.method_functions["muc"](control, files, compute_unsat_constraints=True) + elif self.methods == {"muc", "unsat-constraints", "show-decisions"}: + self.method_functions["muc"](control, files, compute_unsat_constraints=True) + elif self.methods == {"unsat-constraints", "show-decisions"}: + self.method_functions["unsat-constraints"](control, files) + else: + print( + f"METHOD ERROR: the combination of the methods {[f'--{m}' for m in self.methods]} is invalid. " + f"Please remove the conflicting method flags" + ) diff --git a/src/clingexplaid/muc/__init__.py b/src/clingexplaid/muc/__init__.py new file mode 100644 index 0000000..37371fc --- /dev/null +++ b/src/clingexplaid/muc/__init__.py @@ -0,0 +1,9 @@ +""" +Minimal Unsatisfiable Core Utilities +""" + +from .core_computer import CoreComputer + +__all__ = [ + "CoreComputer", +] diff --git a/src/clingexplaid/muc/core_computer.py b/src/clingexplaid/muc/core_computer.py new file mode 100644 index 0000000..bd6b7cb --- /dev/null +++ b/src/clingexplaid/muc/core_computer.py @@ -0,0 +1,140 @@ +""" +MUC Module: Core Computer to get Minimal Unsatisfiable Cores +""" + +from itertools import chain, combinations +from typing import Optional, Set, Tuple, Generator, List, Dict + +import clingo + +from ..utils import get_solver_literal_lookup +from ..utils.types import Assumption, AssumptionSet, SymbolSet + + +class CoreComputer: + """ + A container class that allows for a passed program_string and assumption_set to compute a minimal unsatisfiable + core. + """ + + def __init__(self, control: clingo.Control, assumption_set: AssumptionSet): + self.control = control + self.assumption_set = assumption_set + self.literal_lookup = get_solver_literal_lookup(control=self.control) + self.minimal: Optional[AssumptionSet] = None + + def _solve(self, assumptions: Optional[AssumptionSet] = None) -> Tuple[bool, SymbolSet, SymbolSet]: + """ + Internal function that is used to make the single solver calls for finding the minimal unsatisfiable core. 
+ """ + if assumptions is None: + assumptions = self.assumption_set + + with self.control.solve(assumptions=list(assumptions), yield_=True) as solve_handle: + satisfiable = bool(solve_handle.get().satisfiable) + model = solve_handle.model().symbols(atoms=True) if solve_handle.model() is not None else [] + core = {self.literal_lookup[literal_id] for literal_id in solve_handle.core()} + + return satisfiable, set(model), core + + def _compute_single_minimal(self, assumptions: Optional[AssumptionSet] = None) -> AssumptionSet: + """ + Function to compute a single minimal unsatisfiable core from the passed set of assumptions and the program of + the CoreComputer. If there is no minimal unsatisfiable core, since for example the program with assumptions + assumed is satisfiable, an empty set is returned. The algorithm that is used to compute this minimal + unsatisfiable core is the iterative deletion algorithm. + """ + if assumptions is None: + assumptions = self.assumption_set + + # check that the assumption set isn't empty + if not assumptions: + raise ValueError("A minimal unsatisfiable core cannot be computed on an empty assumption set") + + # check if the problem with the full assumption set is unsatisfiable in the first place, and if not skip the + # rest of the algorithm and return an empty set. + satisfiable, _, _ = self._solve(assumptions=assumptions) + if satisfiable: + return set() + + muc_members: Set[Assumption] = set() + working_set = set(assumptions) + + for assumption in assumptions: + # remove the current assumption from the working set + working_set.remove(assumption) + + satisfiable, _, _ = self._solve(assumptions=working_set.union(muc_members)) + # if the working set now becomes satisfiable without the assumption it is added to the muc_members + if satisfiable: + muc_members.add(assumption) + # every time we discover a new muc member we also check if all currently found muc members already + # suffice to make the instance unsatisfiable. If so we can stop the search sice we fund our muc. + if not self._solve(assumptions=muc_members)[0]: + break + + return muc_members + + def shrink(self, assumptions: Optional[AssumptionSet] = None) -> None: + """ + This function applies the unsatisfiable core minimization (`self._compute_single_minimal`) on the assumptions + set `assumptions` and stores the resulting MUC inside `self.minimal`. + """ + self.minimal = self._compute_single_minimal(assumptions=assumptions) + + def get_multiple_minimal(self, max_mucs: Optional[int] = None) -> Generator[AssumptionSet, None, None]: + """ + This function generates all minimal unsatisfiable cores of the provided assumption set. It implements the + generator pattern since finding all mucs of an assumption set is exponential in nature and the search might not + fully complete in reasonable time. The parameter `max_mucs` can be used to specify the maximum number of + mucs that are found before stopping the search. 
+ """ + assumptions = self.assumption_set + assumption_powerset = chain.from_iterable( + combinations(assumptions, r) for r in reversed(range(len(list(assumptions)) + 1)) + ) + + found_sat: List[AssumptionSet] = [] + found_mucs: List[AssumptionSet] = [] + + for current_subset in (set(s) for s in assumption_powerset): + # skip if empty subset + if len(current_subset) == 0: + continue + # skip if an already found satisfiable subset is superset + if any(set(sat).issuperset(current_subset) for sat in found_sat): + continue + # skip if an already found muc is a subset + if any(set(muc).issubset(current_subset) for muc in found_mucs): + continue + + muc = self._compute_single_minimal(assumptions=current_subset) + + # if the current subset wasn't unsatisfiable store this info and continue + if len(list(muc)) == 0: + found_sat.append(current_subset) + continue + + # if iterative deletion finds a muc that wasn't discovered before update sets and yield + if muc not in found_mucs: + found_mucs.append(muc) + yield muc + # if the maximum muc amount is found stop search + if max_mucs is not None and len(found_mucs) == max_mucs: + break + + def muc_to_string(self, muc: AssumptionSet, literal_lookup: Optional[Dict[int, clingo.Symbol]] = None) -> Set[str]: + """ + Converts a MUC into a set containing the string representations of the contained assumptions + """ + # take class literal_lookup as default if no other is provided + if literal_lookup is None: + literal_lookup = self.literal_lookup + + muc_string = set() + for a in muc: + if isinstance(a, int): + muc_string.add(str(literal_lookup[a])) + else: + muc_string.add(str(a[0])) + return muc_string diff --git a/src/clingexplaid/propagators/__init__.py b/src/clingexplaid/propagators/__init__.py new file mode 100644 index 0000000..a7df86b --- /dev/null +++ b/src/clingexplaid/propagators/__init__.py @@ -0,0 +1,16 @@ +""" +Propagators for Explanation +""" + +# pragma: no cover + +from typing import List + +from .propagator_decision_order import DecisionOrderPropagator + +DecisionLevel = List[int] +DecisionLevelList = List[DecisionLevel] + +__all__ = [ + "DecisionOrderPropagator", +] diff --git a/src/clingexplaid/propagators/constants.py b/src/clingexplaid/propagators/constants.py new file mode 100644 index 0000000..edddc78 --- /dev/null +++ b/src/clingexplaid/propagators/constants.py @@ -0,0 +1,11 @@ +""" +Constant definitions for the propagators package +""" + +from ..utils.logging import COLORS + +UNKNOWN_SYMBOL_TOKEN = "INTERNAL" + +INDENT_START = "├─" +INDENT_STEP = f"─{COLORS['GREY']}┼{COLORS['NORMAL']}──" +INDENT_END = f"─{COLORS['GREY']}┤{COLORS['NORMAL']} " diff --git a/src/clingexplaid/propagators/propagator_decision_order.py b/src/clingexplaid/propagators/propagator_decision_order.py new file mode 100644 index 0000000..106c0fc --- /dev/null +++ b/src/clingexplaid/propagators/propagator_decision_order.py @@ -0,0 +1,176 @@ +""" +Propagator Module: Decision Order +""" + +from typing import Optional, Tuple, Set, Dict, List, Sequence, Union + +import clingo + +from .constants import UNKNOWN_SYMBOL_TOKEN, INDENT_STEP, INDENT_START, INDENT_END +from ..utils.logging import COLORS + + +class DecisionOrderPropagator: + """ + Propagator for showing the Decision Order of clingo + """ + + def __init__(self, signatures: Optional[Set[Tuple[str, int]]] = None, prefix: str = ""): + # pylint: disable=missing-function-docstring + self.slit_symbol_lookup: Dict[int, clingo.Symbol] = {} + self.signatures = signatures if signatures is not None else set() + self.prefix = 
prefix + + self.last_decisions: List[int] = [] + self.last_entailments: Dict[int, List[int]] = {} + + def init(self, init: clingo.PropagateInit) -> None: + """ + Method to initialize the Decision Order Propagator. Here the literals are added to the Propagator's watch list. + """ + for atom in init.symbolic_atoms: + program_literal = atom.literal + solver_literal = init.solver_literal(program_literal) + self.slit_symbol_lookup[solver_literal] = atom.symbol + + for atom in init.symbolic_atoms: + if len(self.signatures) > 0 and not any(atom.match(name=s, arity=a) for s, a in self.signatures): + continue + symbolic_atom = init.symbolic_atoms[atom.symbol] + if symbolic_atom is None: + continue # nocoverage + query_program_literal = symbolic_atom.literal + query_solver_literal = init.solver_literal(query_program_literal) + init.add_watch(query_solver_literal) + init.add_watch(-query_solver_literal) + + def _is_printed(self, symbol: Union[clingo.Symbol, str]) -> bool: + """ + Helper function to check if a specific symbol should be printed or not + """ + printed = True + # skip UNKNOWN print if signatures is set + if len(self.signatures) > 0 and symbol == UNKNOWN_SYMBOL_TOKEN: + printed = False # nocoverage + # skip if symbol signature is not in self.signatures + elif len(self.signatures) > 0 and symbol != UNKNOWN_SYMBOL_TOKEN: + # `symbol` can only be a `str` if it is the UNKNOWN_SYMBOL_TOKEN + if isinstance(symbol, str): # nocoverage + printed = False + elif not any(symbol.match(s, a) for s, a in self.signatures): # nocoverage + printed = False + + return printed + + def propagate(self, control: clingo.PropagateControl, changes: Sequence[int]) -> None: + """ + Propagate method the is called when one the registered literals is propagated by clasp. Here useful information + about the decision progress is recorded to be visualized later. + """ + # pylint: disable=unused-argument + decisions, entailments = self.get_decisions(control.assignment) + + print_level = 0 + for d in decisions: + print_level += 1 + if d in self.last_decisions: + continue + + decision_symbol = self.get_symbol(d) + decision_printed = self._is_printed(decision_symbol) + decision_negative = d < 0 + + # build decision indent string + decision_indent_string = INDENT_START + INDENT_STEP * (print_level - 1) + # print decision if it matches the signatures (if provided) + if decision_printed: + print( + f"{self.prefix}{decision_indent_string}" + f"[{['+', '-'][int(decision_negative)]}]" + f" {decision_symbol} " + f"[{d}]" + ) + + entailment_list = entailments[d] if d in entailments else [] + # build entailment indent string + entailment_indent_string = ( + (INDENT_START + INDENT_STEP * (print_level - 2) + INDENT_END) if print_level > 1 else "│ " + ) + for e in entailment_list: + # skip decision in entailments + if e == d: + continue # nocoverage + entailment_symbol = self.get_symbol(e) + entailment_printed = self._is_printed(entailment_symbol) + # skip if entailment symbol doesn't mach signatures (if provided) + if not entailment_printed: + continue # nocoverage + + entailment_negative = e < 0 + if decision_printed: + print( + f"{self.prefix}{entailment_indent_string}{COLORS['GREY']}" + f"[{['+', '-'][int(entailment_negative)]}] " + f"{entailment_symbol} " + f"[{e}]{COLORS['NORMAL']}" + ) + + self.last_decisions = decisions + self.last_entailments = entailments + + def undo(self, thread_id: int, assignment: clingo.Assignment, changes: Sequence[int]) -> None: + """ + This function is called when one of the solvers decisions is undone. 
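        A typical way to use this propagator as a whole (an illustrative sketch, not part of the
        original patch; the small program is made up for demonstration):

            import clingo
            from clingexplaid.propagators import DecisionOrderPropagator

            ctl = clingo.Control()
            ctl.register_propagator(DecisionOrderPropagator(signatures={("a", 1)}))
            ctl.add("base", [], "{a(1..3)}. :- a(1), a(2).")
            ctl.ground([("base", [])])
            ctl.solve()  # decisions and entailments over a/1 are printed while solving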
+ """ + # pylint: disable=unused-argument + + if len(self.last_decisions) < 1: + return # nocoverage + decision = self.last_decisions[-1] + decision_symbol = self.get_symbol(decision) + + # don't print decision undo if its signature is not matching the provided ones + printed = self._is_printed(decision_symbol) + + indent_string = INDENT_START + INDENT_STEP * (len(self.last_decisions) - 1) + if printed: + print(f"{self.prefix}{indent_string}{COLORS['RED']}[✕] {decision_symbol} [{decision}]{COLORS['NORMAL']}") + self.last_decisions = self.last_decisions[:-1] + + @staticmethod + def get_decisions(assignment: clingo.Assignment) -> Tuple[List[int], Dict[int, List[int]]]: + """ + Helper function to extract a list of decisions and entailments from a clingo propagator assignment. + """ + level = 0 + decisions = [] + entailments = {} + try: + while True: + decision = assignment.decision(level) + decisions.append(decision) + + trail = assignment.trail + level_offset_start = trail.begin(level) + level_offset_end = trail.end(level) + level_offset_diff = level_offset_end - level_offset_start + if level_offset_diff > 1: + entailments[decision] = trail[(level_offset_start + 1) : level_offset_end] + level += 1 + except RuntimeError: + return decisions, entailments + + def get_symbol(self, literal: int) -> Union[clingo.Symbol, str]: + """ + Helper function to get a literal's associated symbol. + """ + try: + if literal > 0: + symbol = self.slit_symbol_lookup[literal] + else: + # negate symbol + symbol = clingo.parse_term(str(self.slit_symbol_lookup[-literal])) + except KeyError: + # internal literals + return UNKNOWN_SYMBOL_TOKEN + return symbol diff --git a/src/clingexplaid/py.typed b/src/clingexplaid/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/clingexplaid/transformers/__init__.py b/src/clingexplaid/transformers/__init__.py new file mode 100644 index 0000000..7f374c1 --- /dev/null +++ b/src/clingexplaid/transformers/__init__.py @@ -0,0 +1,19 @@ +""" +Transformers for Explanation +""" + +from .transformer_assumption import AssumptionTransformer +from .transformer_constraint import ConstraintTransformer +from .transformer_fact import FactTransformer +from .transformer_optimization_remover import OptimizationRemover +from .transformer_rule_id import RuleIDTransformer +from .transformer_rule_splitter import RuleSplitter + +__all__ = [ + "AssumptionTransformer", + "ConstraintTransformer", + "FactTransformer", + "OptimizationRemover", + "RuleIDTransformer", + "RuleSplitter", +] diff --git a/src/clingexplaid/transformers/constants.py b/src/clingexplaid/transformers/constants.py new file mode 100644 index 0000000..e940e8b --- /dev/null +++ b/src/clingexplaid/transformers/constants.py @@ -0,0 +1,6 @@ +""" +Constant definitions for the transformers package +""" + +REMOVED_TOKEN = "__REMOVED__" +RULE_ID_SIGNATURE = "_rule" diff --git a/src/clingexplaid/transformers/exceptions.py b/src/clingexplaid/transformers/exceptions.py new file mode 100644 index 0000000..c97cf37 --- /dev/null +++ b/src/clingexplaid/transformers/exceptions.py @@ -0,0 +1,15 @@ +""" +Exceptions for the Transformers Module +""" + + +class UntransformedException(Exception): + """Exception raised if the get_assumptions method of an AssumptionTransformer is called before it is used to + transform a program. + """ + + +class NotGroundedException(Exception): + """Exception raised if the get_assumptions method of an AssumptionTransformer is called without the control object + having been grounded beforehand. 
+ """ diff --git a/src/clingexplaid/transformers/transformer_assumption.py b/src/clingexplaid/transformers/transformer_assumption.py new file mode 100644 index 0000000..f1f10c9 --- /dev/null +++ b/src/clingexplaid/transformers/transformer_assumption.py @@ -0,0 +1,112 @@ +""" +Transformer Module: Assumption Transformer for converting facts to choices that can be assumed +""" + +from pathlib import Path +from typing import Dict, List, Optional, Sequence, Set, Tuple, Union + +import clingo +import clingo.ast as _ast + +from .exceptions import NotGroundedException, UntransformedException +from ..utils import match_ast_symbolic_atom_signature + + +class AssumptionTransformer(_ast.Transformer): + """ + A transformer that transforms facts that match with one of the signatures provided (no signatures means all facts) + into choice rules and also provides the according assumptions for them. + """ + + def __init__(self, signatures: Optional[Set[Tuple[str, int]]] = None): + self.signatures = signatures if signatures is not None else set() + self.fact_rules: List[str] = [] + self.transformed: bool = False + self.program_constants: Dict[str, str] = {} + + def visit_Rule(self, node: clingo.ast.AST) -> clingo.ast.AST: # pylint: disable=C0103 + """ + Transforms head of a rule into a choice rule if it is a fact and adheres to the given signatures. + """ + if node.head.ast_type != _ast.ASTType.Literal: + return node + if node.body: + return node + has_matching_signature = any( + match_ast_symbolic_atom_signature(node.head.atom, (name, arity)) for (name, arity) in self.signatures + ) + # if signatures are defined only transform facts that match them, else transform all facts + if self.signatures and not has_matching_signature: + return node + + self.fact_rules.append(str(node)) + + return _ast.Rule( + location=node.location, + head=_ast.Aggregate( + location=node.location, + left_guard=None, + elements=[node.head], + right_guard=None, + ), + body=[], + ) + + def visit_Definition(self, node: clingo.ast.AST) -> clingo.ast.AST: + """ + All defined constants of the program are stored in self.program_constants + """ + # pylint: disable=invalid-name + self.program_constants[node.name] = node.value.symbol + return node + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + out = [] + _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) + self.transformed = True + return "\n".join(out) + + def parse_files(self, paths: Sequence[Union[str, Path]]) -> str: + """ + Parses the files and returns a string with the transformed program. + """ + out = [] + _ast.parse_files([str(p) for p in paths], lambda stm: out.append((str(self(stm))))) + self.transformed = True + return "\n".join(out) + + def get_assumptions(self, control: clingo.Control, constants: Optional[Dict[str, str]] = None) -> Set[int]: + """ + Returns the assumptions which were gathered during the transformation of the program. Has to be called after + a program has already been transformed. + """ + # Just taking the fact symbolic atoms of the control given doesn't work here since we anticipate that + # this control is ground on the already transformed program. This means that all facts are now choice rules + # which means we cannot detect them like this anymore. 
+ if not self.transformed: + raise UntransformedException( + "The get_assumptions method cannot be called before a program has been transformed" + ) + # If the control has not been grounded yet except since without grounding we don't have access to the symbolic + # atoms. + if len(control.symbolic_atoms) == 0: + raise NotGroundedException( + "The get_assumptions method cannot be called before the control has been grounded" + ) + + constants = constants if constants is not None else {} + + all_constants = dict(self.program_constants) + all_constants.update(constants) + constant_strings = [f"-c {k}={v}" for k, v in all_constants.items()] if constants is not None else [] + fact_control = clingo.Control(constant_strings) + fact_control.add("base", [], "\n".join(self.fact_rules)) + fact_control.ground([("base", [])]) + fact_symbols = [sym.symbol for sym in fact_control.symbolic_atoms if sym.is_fact] + + symbol_to_literal_lookup = {sym.symbol: sym.literal for sym in control.symbolic_atoms} + return {symbol_to_literal_lookup[sym] for sym in fact_symbols if sym in symbol_to_literal_lookup} diff --git a/src/clingexplaid/transformers/transformer_constraint.py b/src/clingexplaid/transformers/transformer_constraint.py new file mode 100644 index 0000000..86b37f0 --- /dev/null +++ b/src/clingexplaid/transformers/transformer_constraint.py @@ -0,0 +1,72 @@ +""" +Transformer Module: Adding atoms to constraint heads to retrace the ones firing in the case of an unsatisfiable program. +""" + +from pathlib import Path +from typing import Sequence, Union, Dict + +import clingo +import clingo.ast as _ast + + +class ConstraintTransformer(_ast.Transformer): + """ + A Transformer that takes all constraint rules and adds an atom to their head to avoid deriving false through them. + """ + + def __init__(self, constraint_head_symbol: str, include_id: bool = False): + self._constraint_head_symbol = constraint_head_symbol + self._include_id = include_id + self._constraint_id = 1 + + self.constraint_location_lookup: Dict[int, clingo.ast.Location] = {} + + def visit_Rule(self, node: clingo.ast.AST) -> clingo.ast.AST: # pylint: disable=C0103 + """ + Adds a constraint_head_symbol atom to the head of every constraint. + """ + if node.head.ast_type != _ast.ASTType.Literal: + return node + if node.head.atom.ast_type != _ast.ASTType.BooleanConstant: + return node + if node.head.atom.value != 0: + return node + + arguments = [] + if self._include_id: + arguments = [_ast.SymbolicTerm(node.location, clingo.parse_term(str(self._constraint_id)))] + + head_symbol = _ast.Function( + location=node.location, + name=self._constraint_head_symbol, + arguments=arguments, + external=0, + ) + + # add constraint location to lookup indexed by the constraint id + self.constraint_location_lookup[self._constraint_id] = node.location + + # increase constraint id + self._constraint_id += 1 + + # insert id symbol into body of rule + node.head = head_symbol + return node.update(**self.visit_children(node)) + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + out = [] + _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) + + return "\n".join(out) + + def parse_files(self, paths: Sequence[Union[str, Path]]) -> str: + """ + Parses the files and returns a string with the transformed program. 
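        Example (an illustrative sketch, not part of the original patch; the constraint below is
        made up for demonstration):

            from clingexplaid.transformers import ConstraintTransformer

            ct = ConstraintTransformer("__unsat__", include_id=True)
            print(ct.parse_string(":- a, b."))
            # the constraint head becomes __unsat__(1) and its location is stored in
            # ct.constraint_location_lookup[1]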
+ """ + out = [] + _ast.parse_files([str(p) for p in paths], lambda stm: out.append((str(self(stm))))) + return "\n".join(out) diff --git a/src/clingexplaid/transformers/transformer_fact.py b/src/clingexplaid/transformers/transformer_fact.py new file mode 100644 index 0000000..0b07be8 --- /dev/null +++ b/src/clingexplaid/transformers/transformer_fact.py @@ -0,0 +1,77 @@ +""" +Transformer Module: Fact Remover +""" + +from pathlib import Path +from typing import Optional, Sequence, Set, Tuple, Union + +import clingo +from clingo import ast + +from .constants import REMOVED_TOKEN +from ..utils import match_ast_symbolic_atom_signature + + +class FactTransformer(ast.Transformer): + """ + Transformer that removes all facts from a program that match provided signatures + """ + + # pylint: disable=duplicate-code + + def __init__(self, signatures: Optional[Set[Tuple[str, int]]] = None): + self.signatures = signatures if signatures is not None else set() + + def visit_Rule(self, node: clingo.ast.AST) -> clingo.ast.AST: # pylint: disable=C0103 + """ + Removes all facts from a program that match the given signatures (if none are given all facts are removed). + """ + if node.head.ast_type != ast.ASTType.Literal: + return node + if node.body: + return node + has_matching_signature = any( + match_ast_symbolic_atom_signature(node.head.atom, (name, arity)) for (name, arity) in self.signatures + ) + # if signatures are defined only transform facts that match them, else transform all facts + if self.signatures and not has_matching_signature: + return node + + return ast.Rule( + location=node.location, + head=ast.Function(location=node.location, name=REMOVED_TOKEN, arguments=[], external=0), + body=[], + ) + + @staticmethod + def post_transform(program_string: str) -> str: + """ + Helper function that is called after the transformation process for cleanup purposes + """ + # remove the transformed REMOVED_TOKENS from the resulting program string + rules = program_string.split("\n") + out = [] + for rule in rules: + if not rule.startswith(REMOVED_TOKEN): + out.append(rule) + return "\n".join(out) + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + out = [] + ast.parse_string(string, lambda stm: out.append(str(self(stm)))) + return self.post_transform("\n".join(out)) + + def parse_files(self, paths: Sequence[Union[str, Path]]) -> str: + """ + Parses the files and returns a string with the transformed program. 
+ """ + out = [] + ast.parse_files( + [str(p) for p in paths], + lambda stm: out.append(str(self(stm))), + ) + return self.post_transform("\n".join(out)) diff --git a/src/clingexplaid/transformers/transformer_optimization_remover.py b/src/clingexplaid/transformers/transformer_optimization_remover.py new file mode 100644 index 0000000..af2b20d --- /dev/null +++ b/src/clingexplaid/transformers/transformer_optimization_remover.py @@ -0,0 +1,61 @@ +""" +Transformer Module: Removing all optimization statements +""" + +from pathlib import Path +from typing import Sequence, Union + +from clingo import ast + +from .constants import REMOVED_TOKEN + + +class OptimizationRemover(ast.Transformer): + """ + Transformer that removes all optimization statements + """ + + # pylint: disable=duplicate-code + + def visit_Minimize(self, node: ast.AST) -> ast.AST: # pylint: disable=C0103 + """ + Removes all facts from a program that match the given signatures (if none are given all facts are removed). + """ + return ast.Rule( + location=node.location, + head=ast.Function(location=node.location, name=REMOVED_TOKEN, arguments=[], external=0), + body=[], + ) + + @staticmethod + def post_transform(program_string: str) -> str: + """ + Helper function that is called after the transformation process for cleanup purposes + """ + # remove the transformed REMOVED_TOKENS from the resulting program string + rules = program_string.split("\n") + out = [] + for rule in rules: + if not rule.startswith(REMOVED_TOKEN): + out.append(rule) + return "\n".join(out) + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + out = [] + ast.parse_string(string, lambda stm: out.append(str(self(stm)))) + return self.post_transform("\n".join(out)) + + def parse_files(self, paths: Sequence[Union[str, Path]]) -> str: + """ + Parses the files and returns a string with the transformed program. + """ + out = [] + ast.parse_files( + [str(p) for p in paths], + lambda stm: out.append(str(self(stm))), + ) + return self.post_transform("\n".join(out)) diff --git a/src/clingexplaid/transformers/transformer_rule_id.py b/src/clingexplaid/transformers/transformer_rule_id.py new file mode 100644 index 0000000..b878bf8 --- /dev/null +++ b/src/clingexplaid/transformers/transformer_rule_id.py @@ -0,0 +1,78 @@ +""" +Transformer Module: Adding unique rule identifiers to the body of rules +""" + +from pathlib import Path +from typing import Optional, Set, Tuple, Union + +import clingo +import clingo.ast as _ast + +from .constants import RULE_ID_SIGNATURE + + +class RuleIDTransformer(_ast.Transformer): + """ + A Transformer that takes all the rules of a program and adds an atom with `self.rule_id_signature` in their bodys, + to make the original rule the generated them identifiable even after grounding. Additionally, a choice rule + containing all generated `self.rule_id_signature` atoms is added, which allows us to add assumptions that assume + them. This is done in order to not modify the original program's reasoning by assuming all `self.rule_id_signature` + atoms as True. + """ + + def __init__(self, rule_id_signature: str = RULE_ID_SIGNATURE): + self.rule_id = 0 + self.rule_id_signature = rule_id_signature + + def visit_Rule(self, node: clingo.ast.AST) -> clingo.ast.AST: # pylint: disable=C0103 + """ + Adds a rule_id_signature(id) atom to the body of every rule that is visited. 
+ """ + # add for each rule a theory atom (self.rule_id_signature) with the id as an argument + symbol = _ast.Function( + location=node.location, + name=self.rule_id_signature, + arguments=[_ast.SymbolicTerm(node.location, clingo.parse_term(str(self.rule_id)))], + external=0, + ) + + # increase the rule_id by one after every transformed rule + self.rule_id += 1 + + # insert id symbol into body of rule + node.body.insert(len(node.body), symbol) + return node.update(**self.visit_children(node)) + + def _get_number_of_rules(self) -> int: + return self.rule_id - 1 if self.rule_id > 1 else self.rule_id + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + self.rule_id = 1 + out = [] + _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) + out.append( + f"{{_rule(1..{self._get_number_of_rules()})}}" + f" % Choice rule to allow all _rule atoms to become assumptions" + ) + + return "\n".join(out) + + def parse_file(self, path: Union[str, Path], encoding: str = "utf-8") -> str: + """ + Parses the file at path and returns a string with the transformed program. + """ + with open(path, "r", encoding=encoding) as f: + return self.parse_string(f.read()) + + def get_assumptions(self, n_rules: Optional[int] = None) -> Set[Tuple[clingo.Symbol, bool]]: + """ + Returns the rule_id_signature assumptions depending on the number of rules contained in the transformed + program. Can only be called after parse_file has been executed before. + """ + if n_rules is None: + n_rules = self._get_number_of_rules() + return {(clingo.parse_term(f"{self.rule_id_signature}({rule_id})"), True) for rule_id in range(1, n_rules + 1)} diff --git a/src/clingexplaid/transformers/transformer_rule_splitter.py b/src/clingexplaid/transformers/transformer_rule_splitter.py new file mode 100644 index 0000000..e09145e --- /dev/null +++ b/src/clingexplaid/transformers/transformer_rule_splitter.py @@ -0,0 +1,100 @@ +""" +Transformer Module: Split Rules into dedicated body and head parts +""" + +import base64 +from pathlib import Path +from typing import Union, List + +import clingo +import clingo.ast as _ast + + +class RuleSplitter(_ast.Transformer): + """ + A transformer that is used to split rules into two. This is done using an intermediate predicate called `_body`, + which contains a base64 representation of the original rule and all body variable assignments for explanation + purposes. This intermediate predicate replaces the head of the original rule and a new rule with the old head and + the newly generated `_body` predicate as the body is also inserted. Use the `parse_string` method to apply this + transformer. 
+ """ + + def __init__(self) -> None: + self.head_rules: List[clingo.ast.AST] = [] + + def visit_Rule(self, node: clingo.ast.AST) -> clingo.ast.AST: # pylint: disable=C0103 + """ + Replaces the head of every rule with the intermediate `_body` predicate and stores all new head rules using this + intermediary predicate in `self.head_rules` + """ + head = node.head + body = node.body + + if body: + # remove MUS literals from rule + cleaned_body_literals = [x for x in node.body if x.atom.symbol.name not in ("__mus__",)] + cleaned_body = "; ".join([str(l) for l in cleaned_body_literals]) + + # get all variables used in body (to later reference in head) + variables = set() + for lit in cleaned_body_literals: + arguments = lit.atom.symbol.arguments + if arguments: + for arg in arguments: + variables.add(arg) + + # convert the cleaned body to a base64 string + rule_body_string = cleaned_body + rule_body_string_bytes = rule_body_string.encode("ascii") + rule_body_base64_bytes = base64.b64encode(rule_body_string_bytes) + rule_body_base64 = rule_body_base64_bytes.decode("ascii") + + # create a new '_body' head for the original rule + new_head_arguments = [ + _ast.SymbolicTerm(node.location, clingo.parse_term(f'"{rule_body_base64}"')), + _ast.Function( + location=node.location, + name="", + arguments=sorted(variables), + external=0, + ), + ] + new_head = _ast.Function( + location=node.location, + name="_body", + arguments=new_head_arguments, + external=0, + ) + node.head = new_head + + # create new second rule that links the head with the '_body' matching predicate + new_head_rule = _ast.Rule( + location=node.location, + head=head, + body=[new_head], + ) + self.head_rules.append(new_head_rule) + + return node + + # default case + return node + + def parse_string(self, string: str) -> str: + """ + Function that applies the transformation to the `program_string` it's called with and returns the transformed + program string. + """ + self.head_rules = [] + out = [] + _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) + out += [str(r) for r in self.head_rules] + + return "\n".join(out) + + def parse_file(self, path: Union[str, Path], encoding: str = "utf-8") -> str: + """ + Parses the file at path and returns a string with the transformed program. 
+ """ + with open(path, "r", encoding=encoding) as f: + return self.parse_string(f.read()) diff --git a/src/clingexplaid/unsat_constraints/__init__.py b/src/clingexplaid/unsat_constraints/__init__.py new file mode 100644 index 0000000..59d320a --- /dev/null +++ b/src/clingexplaid/unsat_constraints/__init__.py @@ -0,0 +1,9 @@ +""" +Functionality for Unsat Constraints +""" + +from .unsat_constraint_computer import UnsatConstraintComputer + +__all__ = [ + "UnsatConstraintComputer", +] diff --git a/src/clingexplaid/unsat_constraints/constants.py b/src/clingexplaid/unsat_constraints/constants.py new file mode 100644 index 0000000..84b97f6 --- /dev/null +++ b/src/clingexplaid/unsat_constraints/constants.py @@ -0,0 +1,5 @@ +""" +Constant definitions for the unsat_constraints package +""" + +UNSAT_CONSTRAINT_SIGNATURE = "__unsat__" diff --git a/src/clingexplaid/unsat_constraints/unsat_constraint_computer.py b/src/clingexplaid/unsat_constraints/unsat_constraint_computer.py new file mode 100644 index 0000000..b61fcac --- /dev/null +++ b/src/clingexplaid/unsat_constraints/unsat_constraint_computer.py @@ -0,0 +1,123 @@ +""" +Unsat Constraint Utilities +""" + +import re +from typing import List, Optional, Dict + +import clingo +from clingo.ast import Location + +from .constants import UNSAT_CONSTRAINT_SIGNATURE +from ..transformers import ConstraintTransformer, FactTransformer, OptimizationRemover +from ..utils import get_signatures_from_model_string + + +class UnsatConstraintComputer: + """ + A container class that allows for a passed unsatisfiable program_string to identify the underlying constraints + making it unsatisfiable + """ + + def __init__( + self, + control: Optional[clingo.Control] = None, + ): + self.control = control if control is not None else clingo.Control() + self.program_transformed: Optional[str] = None + self.initialized: bool = False + + self._file_constraint_lookup: Dict[int, clingo.ast.Location] = {} + + def parse_string(self, program_string: str) -> None: + """ + Method to parse a provided program string + """ + ct = ConstraintTransformer(UNSAT_CONSTRAINT_SIGNATURE, include_id=True) + self.program_transformed = ct.parse_string(program_string) + self._file_constraint_lookup = ct.constraint_location_lookup + self.initialized = True + + def parse_files(self, files: List[str]) -> None: + """ + Method to parse a provided sequence of filenames + """ + ct = ConstraintTransformer(UNSAT_CONSTRAINT_SIGNATURE, include_id=True) + if not files: + program_transformed = ct.parse_files("-") # nocoverage + else: + program_transformed = ct.parse_files(files) + + # remove optimization statements + optr = OptimizationRemover() + program_transformed = optr.parse_string(program_transformed) + + self.program_transformed = program_transformed + self._file_constraint_lookup = ct.constraint_location_lookup + self.initialized = True + + def get_constraint_location(self, constraint_id: int) -> Optional[Location]: + """ + Method to get the file that a constraint (identified by its `constraint_id`) is located in. + """ + return self._file_constraint_lookup.get(constraint_id) + + def get_unsat_constraints(self, assumption_string: Optional[str] = None) -> Dict[int, str]: + """ + Method to get the unsat constraints of an initialized `UnsatConstraintComputer` Object. + """ + + # only execute if the UnsatConstraintComputer was properly initialized + if not self.initialized: + raise ValueError( + "UnsatConstraintComputer is not properly initialized. To do so call either `parse_files` " + "or `parse_string`." 
+ ) + + program_string = str(self.program_transformed) + # if an assumption string is provided use a FactTransformer to remove interfering facts + if assumption_string is not None and len(assumption_string) > 0: + assumptions_signatures = get_signatures_from_model_string(assumption_string) + ft = FactTransformer(signatures=assumptions_signatures) + # first remove all facts from the programs matching the assumption signatures from the assumption_string + program_string = ft.parse_string(program_string) + # then add the assumed atoms as the only remaining facts + program_string += "\n" + ". ".join(assumption_string.split()) + "." + + # add minimization soft constraint to optimize for the smallest set of unsat constraints + minimizer_rule = f"#minimize {{1,X : {UNSAT_CONSTRAINT_SIGNATURE}(X)}}." + program_string = program_string + "\n" + minimizer_rule + + # create a rule lookup for every constraint in the program associated with it's unsat id + constraint_lookup = {} + for line in program_string.split("\n"): + id_re = re.compile( + f"{UNSAT_CONSTRAINT_SIGNATURE}[(]([1-9][0-9]*)[)]" + ) + match_result = id_re.match(line) + if match_result is None: + continue + constraint_id = match_result.group(1) + constraint_lookup[int(constraint_id)] = ( + str(line).replace(f"{UNSAT_CONSTRAINT_SIGNATURE}({constraint_id})", "").strip() + ) + + self.control.add("base", [], program_string) + self.control.ground([("base", [])]) + + with self.control.solve(yield_=True) as solve_handle: + model = solve_handle.model() + unsat_constraint_atoms = [] + while model is not None: + unsat_constraint_atoms = [ + a for a in model.symbols(atoms=True) if a.match(UNSAT_CONSTRAINT_SIGNATURE, 1, True) + ] + solve_handle.resume() + model = solve_handle.model() + unsat_constraints: Dict[int, str] = {} + for a in unsat_constraint_atoms: + constraint_id = a.arguments[0].number + constraint = str(constraint_lookup.get(constraint_id)) + unsat_constraints[constraint_id] = constraint + + return unsat_constraints diff --git a/src/clingexplaid/utils/__init__.py b/src/clingexplaid/utils/__init__.py index c804ea5..7c661b5 100644 --- a/src/clingexplaid/utils/__init__.py +++ b/src/clingexplaid/utils/__init__.py @@ -1,22 +1,17 @@ """ -Utilities +Utilities. """ -from typing import Dict, Iterable, Set, Tuple, Union +import re +from typing import Dict, Set, Tuple, List import clingo from clingo.ast import ASTType -SymbolSet = Set[clingo.Symbol] -Literal = Tuple[clingo.Symbol, bool] -LiteralSet = Set[Literal] -Assumption = Union[Literal, int] -AssumptionSet = Iterable[Assumption] - def match_ast_symbolic_atom_signature( ast_symbol: ASTType.SymbolicAtom, signature: Tuple[str, int] -): +) -> bool: """ Function to match the signature of an AST SymbolicAtom to a tuple containing a string and int value, representing a matching signature. @@ -44,3 +39,54 @@ def get_solver_literal_lookup(control: clingo.Control) -> Dict[int, clingo.Symbo match_ast_symbolic_atom_signature.__name__, get_solver_literal_lookup.__name__, ] + + +def get_signatures_from_model_string(model_string: str) -> Set[Tuple[str, int]]: + """ + This function returns a dictionary of the signatures/arities of all atoms of a model string. Model strings are of + the form: `"signature1(X1, ..., XN) ... 
signatureM(X1, ..., XK)"` + """ + signatures = set() + for atom_string in model_string.split(): + result = re.search(r"([^(]*)\(", atom_string) + if result is None: + signatures.add((atom_string, 0)) + continue + signature = result.group(1) + # calculate arity for the signature + arity = 0 + level = 0 + for c in atom_string.removeprefix(signature): + if c == "(": + level += 1 + elif c == ")": + level -= 1 + else: + if level == 1 and c == ",": + arity += 1 + # if arity is not 0 increase by 1 for the last remaining parameter that is not followed by a comma + # also increase arity by one for the case that there's only one parameter and no commas are contained just '(' + if arity > 0 or "(" in atom_string: + arity += 1 + signatures.add((signature, arity)) + return signatures + + +def get_constants_from_arguments(argument_vector: List[str]) -> Dict[str, str]: + """ + Function that is used to parse the command line argument vector to extract a dictionary of provided constants and + their values. For example "-c test=42" would be converted to {"test": "42"}. + """ + constants = {} + next_constant = False + for element in argument_vector: + if next_constant: + result = re.search(r"(.*)=(.*)", element) + if result is None: + continue + constants[result.group(1)] = result.group(2) + next_constant = False + if element in ("-c", "--const"): + next_constant = True + + return constants diff --git a/src/clingexplaid/utils/cli.py b/src/clingexplaid/utils/cli.py deleted file mode 100644 index 8a17f7f..0000000 --- a/src/clingexplaid/utils/cli.py +++ /dev/null @@ -1,93 +0,0 @@ -""" -Command Line Interface Utilities -""" - -import configparser -from pathlib import Path - -from clingo.application import Application - -from clingexplaid.utils import get_solver_literal_lookup -from clingexplaid.utils.logger import BACKGROUND_COLORS, COLORS -from clingexplaid.utils.muc import CoreComputer -from clingexplaid.utils.transformer import AssumptionTransformer - - -class CoreComputerApp(Application): - """ - Application class realizing the CoreComputer functionality of the `clingexplaid.utils.muc.CoreComputer` class. - """ - - program_name: str = "core-computer" - version: str = "0.1" - - def __init__(self, name): - # pylint: disable = unused-argument - self.signatures = {} - - def _parse_assumption_signature(self, input_string: str) -> bool: - # signature_strings = input_string.strip().split(",") - signature_list = input_string.split("/") - if len(signature_list) != 2: - print("Not valid format for signature, expected name/arity") - return False - self.signatures[signature_list[0]] = int(signature_list[1]) - return True - - def print_model(self, model, _): - return - - def register_options(self, options): - """ - See clingo.clingo_main(). 
- """ - - group = "MUC Options" - - options.add( - group, - "assumption-signature,a", - "Facts matching with this signature will be converted to assumptions for finding a MUC " - "(default: all facts)", - self._parse_assumption_signature, - multi=True, - ) - - def main(self, control, files): - setup_file_path = Path(__file__).parent.joinpath("../../../setup.cfg") - setup_config = configparser.ConfigParser() - setup_config.read(setup_file_path) - metadata = setup_config["metadata"] - print(metadata["name"], "version", metadata["version"]) - signature_set = set(self.signatures.items()) if self.signatures else None - at = AssumptionTransformer(signatures=signature_set) - if not files: - program_transformed = at.parse_files("-") - print("Reading from -") - else: - program_transformed = at.parse_files(files) - print(f"Reading from {files[0]} {'...' if len(files) > 1 else ''}") - - control.add("base", [], program_transformed) - control.ground([("base", [])]) - - literal_lookup = get_solver_literal_lookup(control) - - assumptions = at.get_assumptions(control) - - cc = CoreComputer(control, assumptions) - - print("Solving...") - control.solve(assumptions=list(assumptions), on_core=cc.shrink) - - if cc.minimal is None: - print("SATISFIABLE: Instance has no MUCs") - return - - result = " ".join([str(literal_lookup[a]) for a in cc.minimal]) - - muc_id = 1 - print( - f"{BACKGROUND_COLORS['BLUE']} MUC: {muc_id} {COLORS['NORMAL']}{COLORS['DARK_BLUE']}{COLORS['NORMAL']}" - ) - print(f"{COLORS['BLUE']}{result}{COLORS['NORMAL']}") diff --git a/src/clingexplaid/utils/logger.py b/src/clingexplaid/utils/logger.py deleted file mode 100644 index 0c174a5..0000000 --- a/src/clingexplaid/utils/logger.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Setup project wide loggers. -""" - -import logging -import sys - -COLORS = { - "GREY": "\033[90m", - "BLUE": "\033[94m", - "DARK_BLUE": "\033[34m", - "GREEN": "\033[92m", - "YELLOW": "\033[93m", - "RED": "\033[91m", - "NORMAL": "\033[0m", -} - -BACKGROUND_COLORS = { - "BLUE": "\033[44m", -} - - -class SingleLevelFilter(logging.Filter): - """ - Filter levels. - """ - - def __init__(self, passlevel, reject): - # pylint: disable=super-init-not-called - self.passlevel = passlevel - self.reject = reject - - def filter(self, record): - if self.reject: - return record.levelno != self.passlevel # nocoverage - - return record.levelno == self.passlevel - - -def setup_logger(name, level): - """ - Setup logger. - """ - - logger = logging.getLogger(name) - logger.propagate = False - logger.setLevel(level) - log_message_str = "{}%(levelname)s:{} - %(message)s{}" - - def set_handler(level, color): - handler = logging.StreamHandler(sys.stderr) - handler.addFilter(SingleLevelFilter(level, False)) - handler.setLevel(level) - formatter = logging.Formatter( - log_message_str.format(COLORS[color], COLORS["GREY"], COLORS["NORMAL"]) - ) - handler.setFormatter(formatter) - logger.addHandler(handler) - - set_handler(logging.INFO, "GREEN") - set_handler(logging.WARNING, "YELLOW") - set_handler(logging.DEBUG, "BLUE") - set_handler(logging.ERROR, "RED") - - return logger diff --git a/src/clingexplaid/utils/logging.py b/src/clingexplaid/utils/logging.py new file mode 100644 index 0000000..1798999 --- /dev/null +++ b/src/clingexplaid/utils/logging.py @@ -0,0 +1,90 @@ +""" +Setup project wide loggers. + +This is a thin wrapper around Python's logging module. It supports colored +logging. 
+""" + +import logging +from typing import TextIO + +NOTSET = logging.NOTSET +DEBUG = logging.DEBUG +INFO = logging.INFO +WARNING = logging.WARNING +ERROR = logging.ERROR +CRITICAL = logging.CRITICAL + +COLORS = { + "GREY": "\033[90m", + "BLUE": "\033[94m", + "GREEN": "\033[92m", + "YELLOW": "\033[93m", + "RED": "\033[91m", + "NORMAL": "\033[0m", + "DARK_RED": "\033[91m", + "BLACK": "\033[30m", +} + +BACKGROUND_COLORS = { + "BLUE": "\033[44m", + "LIGHT_BLUE": "\033[104m", + "RED": "\033[41m", + "WHITE": "\033[107m", + "GREY": "\033[100m", + "LIGHT-GREY": "\033[47m", +} + + +class SingleLevelFilter(logging.Filter): + """ + Filter levels. + """ + + passlevel: int + reject: bool + + def __init__(self, passlevel: int, reject: bool): + # pylint: disable=super-init-not-called + self.passlevel = passlevel + self.reject = reject + + def filter(self, record: logging.LogRecord) -> bool: + if self.reject: + return record.levelno != self.passlevel # nocoverage + + return record.levelno == self.passlevel + + +def configure_logging(stream: TextIO, level: int, use_color: bool) -> None: + """ + Configure application logging. + """ + + def format_str(color: str) -> str: + if use_color: + return f"{COLORS[color]}%(levelname)s:{COLORS['GREY']} - %(message)s{COLORS['NORMAL']}" + return "%(levelname)s: - %(message)s" # nocoverage + + def make_handler(level: int, color: str) -> "logging.StreamHandler[TextIO]": + handler = logging.StreamHandler(stream) + handler.addFilter(SingleLevelFilter(level, False)) + handler.setLevel(level) + formatter = logging.Formatter(format_str(color)) + handler.setFormatter(formatter) + return handler + + handlers = [ + make_handler(logging.INFO, "GREEN"), + make_handler(logging.WARNING, "YELLOW"), + make_handler(logging.DEBUG, "BLUE"), + make_handler(logging.ERROR, "RED"), + ] + logging.basicConfig(handlers=handlers, level=level) + + +def get_logger(name: str) -> logging.Logger: + """ + Get a logger with the given name. + """ + return logging.getLogger(name) diff --git a/src/clingexplaid/utils/muc.py b/src/clingexplaid/utils/muc.py deleted file mode 100644 index 1ae4309..0000000 --- a/src/clingexplaid/utils/muc.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -Unsatisfiable Core Utilities -""" - -from typing import Optional, Set, Tuple - -import clingo - -from . import Assumption, AssumptionSet, SymbolSet, get_solver_literal_lookup - - -class CoreComputer: - """ - A container class that allows for a passed program_string and assumption_set to compute a minimal unsatisfiable - core. - """ - - def __init__(self, control: clingo.Control, assumption_set: AssumptionSet): - self.control = control - self.assumption_set = assumption_set - self.literal_lookup = get_solver_literal_lookup(control=self.control) - self.minimal: Optional[AssumptionSet] = None - - def _solve( - self, assumptions: Optional[AssumptionSet] = None - ) -> Tuple[bool, SymbolSet, SymbolSet]: - """ - Internal function that is used to make the single solver calls for finding the minimal unsatisfiable core. 
- """ - if assumptions is None: - assumptions = self.assumption_set - - with self.control.solve(assumptions=list(assumptions), yield_=True) as solve_handle: # type: ignore[union-attr] - satisfiable = bool(solve_handle.get().satisfiable) - model = ( - solve_handle.model().symbols(atoms=True) - if solve_handle.model() is not None - else [] - ) - core = { - self.literal_lookup[literal_id] for literal_id in solve_handle.core() - } - - return satisfiable, set(model), core - - def _compute_single_minimal( - self, assumptions: Optional[AssumptionSet] = None - ) -> AssumptionSet: - """ - Function to compute a single minimal unsatisfiable core from the passed set of assumptions and the program of - the CoreComputer. If there is not minimal unsatisfiable core, since for example the program with assumptions - assumed is satisfiable, an empty set is returned. The algorithm that is used to compute this minimal - unsatisfiable core is the iterative deletion algorithm. - """ - if assumptions is None: - assumptions = self.assumption_set - - # check that the assumption set isn't empty - if not assumptions: - raise ValueError( - "A minimal unsatisfiable core cannot be computed on an empty assumption set" - ) - - # check if the problem with the full assumption set is unsatisfiable in the first place, and if not skip the - # rest of the algorithm and return an empty set. - satisfiable, _, _ = self._solve(assumptions=assumptions) - if satisfiable: - return set() - - muc_members: Set[Assumption] = set() - working_set = set(assumptions) - - for assumption in self.assumption_set: - # remove the current assumption from the working set - working_set.remove(assumption) - - satisfiable, _, _ = self._solve(assumptions=working_set.union(muc_members)) - # if the working set now becomes satisfiable without the assumption it is added to the muc_members - if satisfiable: - muc_members.add(assumption) - # every time we discover a new muc member we also check if all currently found muc members already - # suffice to make the instance unsatisfiable. If so we can stop the search sice we fund our muc. - if not self._solve(assumptions=muc_members)[0]: - break - - return muc_members - - def shrink(self, assumptions: Optional[AssumptionSet] = None) -> None: - """ - This function applies the unsatisfiable core minimization (`self._compute_single_minimal`) on the assumptions - set `assumptions` and stores the resulting MUC inside `self.minimal`. - """ - self.minimal = self._compute_single_minimal(assumptions=assumptions) - - -__all__ = [ - CoreComputer.__name__, -] diff --git a/src/clingexplaid/utils/parser.py b/src/clingexplaid/utils/parser.py index 674aadf..ab1aa0e 100644 --- a/src/clingexplaid/utils/parser.py +++ b/src/clingexplaid/utils/parser.py @@ -2,19 +2,21 @@ The command line parser for the project. """ -import logging +import sys from argparse import ArgumentParser from textwrap import dedent -from typing import Any, cast +from typing import Any, Optional, cast -from pkg_resources import DistributionNotFound, require +from . import logging __all__ = ["get_parser"] -try: - VERSION = require("fillname")[0].version -except DistributionNotFound: # nocoverage - VERSION = "local" # nocoverage +if sys.version_info[1] < 8: + import importlib_metadata as metadata # nocoverage +else: + from importlib import metadata # nocoverage + +VERSION = metadata.version("clingexplaid") def get_parser() -> ArgumentParser: @@ -22,15 +24,14 @@ def get_parser() -> ArgumentParser: Return the parser for command line options. 
""" parser = ArgumentParser( - prog="fillname", + prog="clingexplaid", description=dedent( """\ - fillname + clingexplaid filldescription """ ), ) - levels = [ ("error", logging.ERROR), ("warning", logging.WARNING), @@ -38,7 +39,7 @@ def get_parser() -> ArgumentParser: ("debug", logging.DEBUG), ] - def get(levels, name): + def get(levels: list[tuple[str, int]], name: str) -> Optional[int]: for key, val in levels: if key == name: return val @@ -53,7 +54,5 @@ def get(levels, name): type=cast(Any, lambda name: get(levels, name)), ) - parser.add_argument( - "--version", "-v", action="version", version=f"%(prog)s {VERSION}" - ) + parser.add_argument("--version", "-v", action="version", version=f"%(prog)s {VERSION}") return parser diff --git a/src/clingexplaid/utils/transformer.py b/src/clingexplaid/utils/transformer.py deleted file mode 100644 index b12dc64..0000000 --- a/src/clingexplaid/utils/transformer.py +++ /dev/null @@ -1,239 +0,0 @@ -""" -Transformers for Explanation -""" - -from pathlib import Path -from typing import List, Optional, Sequence, Set, Tuple, Union - -import clingo -from clingo import ast as _ast - -from clingexplaid.utils import match_ast_symbolic_atom_signature - -RULE_ID_SIGNATURE = "_rule" - - -class UntransformedException(Exception): - """Exception raised if the get_assumptions method of an AssumptionTransformer is called before it is used to - transform a program. - """ - - -class RuleIDTransformer(_ast.Transformer): - """ - A Transformer that takes all the rules of a program and adds an atom with `self.rule_id_signature` in their bodys, - to make the original rule the generated them identifiable even after grounding. Additionally, a choice rule - containing all generated `self.rule_id_signature` atoms is added, which allows us to add assumptions that assume - them. This is done in order to not modify the original program's reasoning by assuming all `self.rule_id_signature` - atoms as True. - """ - - def __init__(self, rule_id_signature: str = RULE_ID_SIGNATURE): - self.rule_id = 0 - self.rule_id_signature = rule_id_signature - - def visit_Rule(self, node): # pylint: disable=C0103 - """ - Adds a rule_id_signature(id) atom to the body of every rule that is visited. - """ - # add for each rule a theory atom (self.rule_id_signature) with the id as an argument - symbol = _ast.Function( - location=node.location, - name=self.rule_id_signature, - arguments=[ - _ast.SymbolicTerm(node.location, clingo.parse_term(str(self.rule_id))) - ], - external=0, - ) - - # increase the rule_id by one after every transformed rule - self.rule_id += 1 - - # insert id symbol into body of rule - node.body.insert(len(node.body), symbol) - return node.update(**self.visit_children(node)) - - def _get_number_of_rules(self): - return self.rule_id - 1 if self.rule_id > 1 else self.rule_id - - def parse_string(self, string: str) -> str: - """ - Function that applies the transformation to the `program_string` it's called with and returns the transformed - program string. - """ - self.rule_id = 1 - out = [] - _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) - out.append( - f"{{_rule(1..{self._get_number_of_rules()})}}" - f" % Choice rule to allow all _rule atoms to become assumptions" - ) - - return "\n".join(out) - - def parse_file(self, path: Union[str, Path], encoding: str = "utf-8") -> str: - """ - Parses the file at path and returns a string with the transformed program. 
- """ - with open(path, "r", encoding=encoding) as f: - return self.parse_string(f.read()) - - def get_assumptions( - self, n_rules: Optional[int] = None - ) -> Set[Tuple[clingo.Symbol, bool]]: - """ - Returns the rule_id_signature assumptions depending on the number of rules contained in the transformed - program. Can only be called after parse_file has been executed before. - """ - if n_rules is None: - n_rules = self._get_number_of_rules() - return { - (clingo.parse_term(f"{self.rule_id_signature}({rule_id})"), True) - for rule_id in range(1, n_rules + 1) - } - - -class AssumptionTransformer(_ast.Transformer): - """ - A transformer that transforms facts that match with one of the signatures provided (no signatures means all facts) - into choice rules and also provides the according assumptions for them. - """ - - def __init__(self, signatures: Optional[Set[Tuple[str, int]]] = None): - self.signatures = signatures if signatures is not None else set() - self.fact_rules: List[str] = [] - self.transformed: bool = False - - def visit_Rule(self, node): # pylint: disable=C0103 - """ - Transforms head of a rule into a choice rule if it is a fact and adheres to the given signatures. - """ - if node.head.ast_type != _ast.ASTType.Literal: - return node - if node.body: - return node - has_matching_signature = any( - match_ast_symbolic_atom_signature(node.head.atom, (name, arity)) - for (name, arity) in self.signatures - ) - # if signatures are defined only transform facts that match them, else transform all facts - if self.signatures and not has_matching_signature: - return node - - self.fact_rules.append(str(node)) - - return _ast.Rule( - location=node.location, - head=_ast.Aggregate( - location=node.location, - left_guard=None, - elements=[node.head], - right_guard=None, - ), - body=[], - ) - - def parse_string(self, string: str) -> str: - """ - Function that applies the transformation to the `program_string` it's called with and returns the transformed - program string. - """ - out = [] - _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) - self.transformed = True - return "\n".join(out) - - def parse_files(self, paths: Sequence[Union[str, Path]]) -> str: - """ - Parses the files and returns a string with the transformed program. - """ - out = [] - _ast.parse_files( - [str(p) for p in paths], lambda stm: out.append((str(self(stm)))) - ) - self.transformed = True - return "\n".join(out) - - def get_assumptions(self, control: clingo.Control) -> Set[int]: - """ - Returns the assumptions which were gathered during the transformation of the program. Has to be called after - a program has already been transformed. - """ - # Just taking the fact symbolic atoms of the control given doesn't work here since we anticipate that - # this control is ground on the already transformed program. This means that all facts are now choice rules - # which means we cannot detect them like this anymore. 
- if not self.transformed: - raise UntransformedException( - "The get_assumptions method cannot be called before a program has been " - "transformed" - ) - fact_control = clingo.Control() - fact_control.add("base", [], "\n".join(self.fact_rules)) - fact_control.ground([("base", [])]) - fact_symbols = [ - sym.symbol for sym in fact_control.symbolic_atoms if sym.is_fact - ] - - symbol_to_literal_lookup = { - sym.symbol: sym.literal for sym in control.symbolic_atoms - } - return { - symbol_to_literal_lookup[sym] - for sym in fact_symbols - if sym in symbol_to_literal_lookup - } - - -class ConstraintTransformer(_ast.Transformer): - """ - A Transformer that takes all constraint rules and adds an atom to their head to avoid deriving false through them. - """ - - def __init__(self, constraint_head_symbol: str): - self.constraint_head_symbol = constraint_head_symbol - - def visit_Rule(self, node): # pylint: disable=C0103 - """ - Adds a constraint_head_symbol atom to the head of every constraint. - """ - if node.head.ast_type != _ast.ASTType.Literal: - return node - if node.head.atom.ast_type != _ast.ASTType.BooleanConstant: - return node - if node.head.atom.value != 0: - return node - - head_symbol = _ast.Function( - location=node.location, - name=self.constraint_head_symbol, - arguments=[], - external=0, - ) - - # insert id symbol into body of rule - node.head = head_symbol - return node.update(**self.visit_children(node)) - - def parse_string(self, string: str) -> str: - """ - Function that applies the transformation to the `program_string` it's called with and returns the transformed - program string. - """ - out = [] - _ast.parse_string(string, lambda stm: out.append((str(self(stm))))) - - return "\n".join(out) - - def parse_file(self, path: Union[str, Path], encoding: str = "utf-8") -> str: - """ - Parses the file at path and returns a string with the transformed program. - """ - with open(path, "r", encoding=encoding) as f: - return self.parse_string(f.read()) - - -__all__ = [ - RuleIDTransformer.__name__, - AssumptionTransformer.__name__, - ConstraintTransformer.__name__, -] diff --git a/src/clingexplaid/utils/types.py b/src/clingexplaid/utils/types.py new file mode 100644 index 0000000..7ef7eb2 --- /dev/null +++ b/src/clingexplaid/utils/types.py @@ -0,0 +1,13 @@ +""" +Custom types for clingexplaid +""" + +from typing import Set, Tuple, Union, Iterable + +import clingo + +SymbolSet = Set[clingo.Symbol] +Literal = Tuple[clingo.Symbol, bool] +LiteralSet = Set[Literal] +Assumption = Union[Literal, int] +AssumptionSet = Iterable[Assumption] diff --git a/tests/clingexplaid/res/test_program_constants.lp b/tests/clingexplaid/res/test_program_constants.lp new file mode 100644 index 0000000..17d5808 --- /dev/null +++ b/tests/clingexplaid/res/test_program_constants.lp @@ -0,0 +1,2 @@ +#const number=42. +#const message=helloworld. \ No newline at end of file diff --git a/tests/clingexplaid/res/test_program_decision_order.lp b/tests/clingexplaid/res/test_program_decision_order.lp new file mode 100644 index 0000000..63c3b84 --- /dev/null +++ b/tests/clingexplaid/res/test_program_decision_order.lp @@ -0,0 +1,4 @@ +{a}. +{b} :- a. +:- not a. +x. \ No newline at end of file diff --git a/tests/clingexplaid/res/test_program_multi_muc.lp b/tests/clingexplaid/res/test_program_multi_muc.lp new file mode 100644 index 0000000..d5bd004 --- /dev/null +++ b/tests/clingexplaid/res/test_program_multi_muc.lp @@ -0,0 +1,6 @@ +a(1..10). + +:- a(1), a(2). +:- a(5), a(8), a(3). +:- a(1), a(9). +:- a(1), a(9), a(4). 
\ No newline at end of file diff --git a/tests/clingexplaid/res/test_program_optimization.lp b/tests/clingexplaid/res/test_program_optimization.lp new file mode 100644 index 0000000..aad8c05 --- /dev/null +++ b/tests/clingexplaid/res/test_program_optimization.lp @@ -0,0 +1,5 @@ +1{a(1..10)}. +a_count(C) :- C=#count{X: a(X)}. +#minimize{C@2: a_count(C)}. +a_sum(S) :- S=#sum{X: a(X)}. +#maximize{S@1: a_sum(S)}. \ No newline at end of file diff --git a/tests/clingexplaid/res/test_program_rules.lp b/tests/clingexplaid/res/test_program_rules.lp new file mode 100644 index 0000000..3543e09 --- /dev/null +++ b/tests/clingexplaid/res/test_program_rules.lp @@ -0,0 +1,5 @@ +constant. +head(1) :- body(1). +head(2) :- body(2,X,Y). +head(3) :- body(X). +:- constraint. \ No newline at end of file diff --git a/tests/clingexplaid/res/test_program_unsat_constraints.lp b/tests/clingexplaid/res/test_program_unsat_constraints.lp new file mode 100644 index 0000000..8ded791 --- /dev/null +++ b/tests/clingexplaid/res/test_program_unsat_constraints.lp @@ -0,0 +1,4 @@ +{a;b;c;d}. + +:- a. +:- not a. \ No newline at end of file diff --git a/tests/clingexplaid/res/transformed_program_constraints_id.lp b/tests/clingexplaid/res/transformed_program_constraints_id.lp new file mode 100644 index 0000000..d7d339a --- /dev/null +++ b/tests/clingexplaid/res/transformed_program_constraints_id.lp @@ -0,0 +1,6 @@ +#program base. +{ a; b; c; d }. +x(1). +#true :- x(2). +unsat(1) :- a; b; c. +unsat(2) :- a; d. \ No newline at end of file diff --git a/tests/clingexplaid/res/transformed_program_facts.lp b/tests/clingexplaid/res/transformed_program_facts.lp new file mode 100644 index 0000000..356e5e0 --- /dev/null +++ b/tests/clingexplaid/res/transformed_program_facts.lp @@ -0,0 +1,5 @@ +#program base. +b(2) :- x. +c(3); c(4) :- x. +f(17); f(18) :- e(16). +x(19). \ No newline at end of file diff --git a/tests/clingexplaid/res/transformed_program_optimization.lp b/tests/clingexplaid/res/transformed_program_optimization.lp new file mode 100644 index 0000000..6e270d9 --- /dev/null +++ b/tests/clingexplaid/res/transformed_program_optimization.lp @@ -0,0 +1,4 @@ +#program base. +1 <= { a((1..10)) }. +a_count(C) :- C = #count { X: a(X) }. +a_sum(S) :- S = #sum { X: a(X) }. \ No newline at end of file diff --git a/tests/clingexplaid/res/transformed_program_rules_split.lp b/tests/clingexplaid/res/transformed_program_rules_split.lp new file mode 100644 index 0000000..7e745f2 --- /dev/null +++ b/tests/clingexplaid/res/transformed_program_rules_split.lp @@ -0,0 +1,10 @@ +#program base. +constant. +_body("Ym9keSgxKQ==",(1,)) :- body(1). +_body("Ym9keSgyLFgsWSk=",(X,Y,2)) :- body(2,X,Y). +_body("Ym9keShYKQ==",(X,)) :- body(X). +_body("Y29uc3RyYWludA==",()) :- constraint. +head(1) :- _body("Ym9keSgxKQ==",(1,)). +head(2) :- _body("Ym9keSgyLFgsWSk=",(X,Y,2)). +head(3) :- _body("Ym9keShYKQ==",(X,)). +#false :- _body("Y29uc3RyYWludA==",()). \ No newline at end of file diff --git a/tests/clingexplaid/test_main.py b/tests/clingexplaid/test_main.py index 926de46..35336a8 100644 --- a/tests/clingexplaid/test_main.py +++ b/tests/clingexplaid/test_main.py @@ -2,338 +2,22 @@ Test cases for main application functionality. 
""" -import random from pathlib import Path -from typing import List, Optional, Set, Tuple, Union +from typing import Union from unittest import TestCase -import clingo +TEST_DIR = parent = Path(__file__).resolve().parent -from clingexplaid.utils import AssumptionSet, get_solver_literal_lookup -from clingexplaid.utils.muc import CoreComputer -from clingexplaid.utils.transformer import ( - AssumptionTransformer, - ConstraintTransformer, - RuleIDTransformer, - UntransformedException, -) -TEST_DIR = parent = Path(__file__).resolve().parent +def read_file(path: Union[str, Path], encoding: str = "utf-8") -> str: + """ + Read file at path and return contents as string. + """ + with open(path, "r", encoding=encoding) as f: + return f.read() class TestMain(TestCase): """ Test cases for clingexplaid. """ - - @staticmethod - def read_file(path: Union[str, Path], encoding: str = "utf-8") -> str: - """ - Read file at path and return contents as string. - """ - with open(path, "r", encoding=encoding) as f: - return f.read() - - @staticmethod - def get_muc_of_program( - program_string: str, - assumption_signatures: Set[Tuple[str, int]], - control: Optional[clingo.Control] = None, - ) -> AssumptionSet: - """ - Helper function to directly get the MUC of a given program string. - """ - ctl = control if control is not None else clingo.Control() - - at = AssumptionTransformer(signatures=assumption_signatures) - transformed_program = at.parse_string(program_string) - - ctl.add("base", [], transformed_program) - ctl.ground([("base", [])]) - - assumptions = at.get_assumptions(ctl) - - cc = CoreComputer(ctl, assumptions) - ctl.solve(assumptions=list(assumptions), on_core=cc.shrink) - - # if the instance was satisfiable and the on_core function wasn't called an empty set is returned, else the muc. - result = cc.minimal if cc.minimal is not None else set() - - return result - - def assert_muc( - self, - muc: Set[Tuple[clingo.Symbol, bool]], - valid_mucs_string_lists: List[Set[str]], - ): - """ - Asserts if a MUC is one of several valid MUC's. - """ - valid_mucs = [ - {clingo.parse_term(s) for s in lit_strings} - for lit_strings in valid_mucs_string_lists - ] - self.assertIn(muc, valid_mucs) - - # TRANSFORMERS - # --- ASSUMPTION TRANSFORMER - - def test_assumption_transformer_parse_file(self): - """ - Test the AssumptionTransformer's `parse_file` method. - """ - program_path = TEST_DIR.joinpath("res/test_program.lp") - program_path_transformed = TEST_DIR.joinpath( - "res/transformed_program_assumptions_certain_signatures.lp" - ) - at = AssumptionTransformer(signatures={(c, 1) for c in "abcdef"}) - result = at.parse_files([program_path]) - self.assertEqual( - result.strip(), self.read_file(program_path_transformed).strip() - ) - - def test_assumption_transformer_parse_file_no_signatures(self): - """ - Test the AssumptionTransformer's `parse_file` method with no signatures provided. - """ - program_path = TEST_DIR.joinpath("res/test_program.lp") - program_path_transformed = TEST_DIR.joinpath( - "res/transformed_program_assumptions_all.lp" - ) - at = AssumptionTransformer() - result = at.parse_files([program_path]) - self.assertEqual( - result.strip(), self.read_file(program_path_transformed).strip() - ) - - def test_assumption_transformer_get_assumptions_before_transformation(self): - """ - Test the AssumptionTransformer's behavior when get_assumptions is called before transformation. 
- """ - at = AssumptionTransformer() - control = clingo.Control() - self.assertRaises(UntransformedException, lambda: at.get_assumptions(control)) - - # --- RULE ID TRANSFORMER - - def test_rule_id_transformer(self): - """ - Test the RuleIDTransformer's `parse_file` and `get_assumptions` methods. - """ - program_path = TEST_DIR.joinpath("res/test_program.lp") - program_path_transformed = TEST_DIR.joinpath( - "res/transformed_program_rule_ids.lp" - ) - rt = RuleIDTransformer() - result = rt.parse_file(program_path) - self.assertEqual( - result.strip(), self.read_file(program_path_transformed).strip() - ) - assumptions = { - (clingo.parse_term(s), True) - for s in [ - "_rule(1)", - "_rule(2)", - "_rule(3)", - "_rule(4)", - "_rule(5)", - "_rule(6)", - "_rule(7)", - ] - } - self.assertEqual(assumptions, rt.get_assumptions()) - - # --- CONSTRAINT TRANSFORMER - - def test_constraint_transformer(self): - """ - Test the ConstraintTransformer's `parse_file` method. - """ - program_path = TEST_DIR.joinpath("res/test_program_constraints.lp") - program_path_transformed = TEST_DIR.joinpath( - "res/transformed_program_constraints.lp" - ) - ct = ConstraintTransformer(constraint_head_symbol="unsat") - result = ct.parse_file(program_path) - self.assertEqual( - result.strip(), self.read_file(program_path_transformed).strip() - ) - - # MUC - - def test_core_computer_shrink_single_muc(self): - """ - Test the CoreComputer's `shrink` function with a single MUC. - """ - - ctl = clingo.Control() - - program = """ - a(1..5). - :- a(1), a(4), a(5). - """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - literal_lookup = get_solver_literal_lookup(ctl) - - self.assert_muc({literal_lookup[a] for a in muc}, [{"a(1)", "a(4)", "a(5)"}]) - - def test_core_computer_shrink_single_atomic_muc(self): - """ - Test the CoreComputer's `shrink` function with a single atomic MUC. - """ - - ctl = clingo.Control() - - program = """ - a(1..5). - :- a(3). - """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - literal_lookup = get_solver_literal_lookup(ctl) - - self.assert_muc({literal_lookup[a] for a in muc}, [{"a(3)"}]) - - def test_core_computer_shrink_multiple_atomic_mucs(self): - """ - Test the CoreComputer's `shrink` function with multiple atomic MUC's. - """ - - ctl = clingo.Control() - - program = """ - a(1..10). - :- a(3). - :- a(5). - :- a(9). - """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - literal_lookup = get_solver_literal_lookup(ctl) - - self.assert_muc( - {literal_lookup[a] for a in muc}, [{"a(3)"}, {"a(5)"}, {"a(9)"}] - ) - - def test_core_computer_shrink_multiple_mucs(self): - """ - Test the CoreComputer's `shrink` function with multiple MUC's. - """ - - ctl = clingo.Control() - - program = """ - a(1..10). - :- a(3), a(9), a(5). - :- a(5), a(1), a(2). - :- a(9), a(2), a(7). 
- """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - literal_lookup = get_solver_literal_lookup(ctl) - - self.assert_muc( - {literal_lookup[a] for a in muc}, - [ - {"a(3)", "a(9)", "a(5)"}, - {"a(5)", "a(1)", "a(2)"}, - {"a(9)", "a(2)", "a(7)"}, - ], - ) - - def test_core_computer_shrink_large_instance_random(self): - """ - Test the CoreComputer's `shrink` function with a large random assumption set. - """ - - ctl = clingo.Control() - - n_assumptions = 1000 - random_core = random.choices(range(n_assumptions), k=10) - program = f""" - a(1..{n_assumptions}). - :- {', '.join([f"a({i})" for i in random_core])}. - """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - literal_lookup = get_solver_literal_lookup(ctl) - - self.assert_muc( - {literal_lookup[a] for a in muc}, [{f"a({i})" for i in random_core}] - ) - - def test_core_computer_shrink_satisfiable(self): - """ - Test the CoreComputer's `shrink` function with a satisfiable assumption set. - """ - - ctl = clingo.Control() - - program = """ - a(1..5). - """ - signatures = {("a", 1)} - - muc = self.get_muc_of_program( - program_string=program, assumption_signatures=signatures, control=ctl - ) - - self.assertEqual(muc, set()) - - # --- INTERNAL - - def test_core_computer_internal_solve_no_assumptions(self): - """ - Test the CoreComputer's `_solve` function with no assumptions. - """ - - control = clingo.Control() - cc = CoreComputer(control, set()) - satisfiable, _, _ = cc._solve() # pylint: disable=W0212 - self.assertTrue(satisfiable) - - def test_core_computer_internal_compute_single_minimal_satisfiable(self): - """ - Test the CoreComputer's `_compute_single_minimal` function with a satisfiable assumption set. - """ - - control = clingo.Control() - program = "a.b.c." - control.add("base", [], program) - control.ground([("base", [])]) - assumptions = {(clingo.parse_term(c), True) for c in "abc"} - cc = CoreComputer(control, assumptions) - muc = cc._compute_single_minimal() # pylint: disable=W0212 - self.assertEqual(muc, set()) - - def test_core_computer_internal_compute_single_minimal_no_assumptions(self): - """ - Test the CoreComputer's `_compute_single_minimal` function with no assumptions. - """ - - control = clingo.Control() - cc = CoreComputer(control, set()) - self.assertRaises( - ValueError, cc._compute_single_minimal # pylint: disable=W0212 - ) diff --git a/tests/clingexplaid/test_muc.py b/tests/clingexplaid/test_muc.py new file mode 100644 index 0000000..2476ed2 --- /dev/null +++ b/tests/clingexplaid/test_muc.py @@ -0,0 +1,277 @@ +""" +Tests for the muc package +""" +import random +from typing import Set, Tuple, Optional, List +from unittest import TestCase + +import clingo +from clingexplaid.muc import CoreComputer +from clingexplaid.transformers import AssumptionTransformer +from clingexplaid.utils.types import AssumptionSet + +from .test_main import TEST_DIR + + +def get_muc_of_program( + program_string: str, + assumption_signatures: Set[Tuple[str, int]], + control: Optional[clingo.Control] = None, +) -> Tuple[AssumptionSet, CoreComputer]: + """ + Helper function to directly get the MUC of a given program string. 
+ """ + ctl = control if control is not None else clingo.Control() + + at = AssumptionTransformer(signatures=assumption_signatures) + transformed_program = at.parse_string(program_string) + + ctl.add("base", [], transformed_program) + ctl.ground([("base", [])]) + + assumptions = at.get_assumptions(ctl) + + cc = CoreComputer(ctl, assumptions) + ctl.solve(assumptions=list(assumptions), on_core=cc.shrink) + + # if the instance was satisfiable and the on_core function wasn't called an empty set is returned, else the muc. + result = cc.minimal if cc.minimal is not None else set() + + return result, cc + + +class TestMUC(TestCase): + """ + Test cases for MUC functionality. + """ + + def _assert_muc( + self, + muc: Set[str], + valid_mucs_string_lists: List[Set[str]], + ) -> None: + """ + Asserts if a MUC is one of several valid MUC's. + """ + valid_mucs = [{clingo.parse_term(s) for s in lit_strings} for lit_strings in valid_mucs_string_lists] + parsed_muc = {clingo.parse_term(s) for s in muc} + self.assertIn(parsed_muc, valid_mucs) + + def test_core_computer_shrink_single_muc(self) -> None: + """ + Test the CoreComputer's `shrink` function with a single MUC. + """ + + ctl = clingo.Control() + + program = """ + a(1..5). + :- a(1), a(4), a(5). + """ + signatures = {("a", 1)} + + muc, cc = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + if cc.minimal is None: + self.fail() + self._assert_muc(cc.muc_to_string(muc), [{"a(1)", "a(4)", "a(5)"}]) + + def test_core_computer_shrink_single_atomic_muc(self) -> None: + """ + Test the CoreComputer's `shrink` function with a single atomic MUC. + """ + + ctl = clingo.Control() + + program = """ + a(1..5). + :- a(3). + """ + signatures = {("a", 1)} + + muc, cc = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + if cc.minimal is None: + self.fail() + self._assert_muc(cc.muc_to_string(muc), [{"a(3)"}]) + + def test_core_computer_shrink_multiple_atomic_mucs(self) -> None: + """ + Test the CoreComputer's `shrink` function with multiple atomic MUC's. + """ + + ctl = clingo.Control() + + program = """ + a(1..10). + :- a(3). + :- a(5). + :- a(9). + """ + signatures = {("a", 1)} + + muc, cc = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + if cc.minimal is None: + self.fail() + self._assert_muc(cc.muc_to_string(muc), [{"a(3)"}, {"a(5)"}, {"a(9)"}]) + + def test_core_computer_shrink_multiple_mucs(self) -> None: + """ + Test the CoreComputer's `shrink` function with multiple MUC's. + """ + + ctl = clingo.Control() + + program = """ + a(1..10). + :- a(3), a(9), a(5). + :- a(5), a(1), a(2). + :- a(9), a(2), a(7). + """ + signatures = {("a", 1)} + + muc, cc = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + if cc.minimal is None: + self.fail() + self._assert_muc( + cc.muc_to_string(muc), + [ + {"a(3)", "a(9)", "a(5)"}, + {"a(5)", "a(1)", "a(2)"}, + {"a(9)", "a(2)", "a(7)"}, + ], + ) + + def test_core_computer_shrink_large_instance_random(self) -> None: + """ + Test the CoreComputer's `shrink` function with a large random assumption set. + """ + + ctl = clingo.Control() + + n_assumptions = 1000 + random_core = random.choices(range(n_assumptions), k=10) + program = f""" + a(1..{n_assumptions}). + :- {', '.join([f"a({i})" for i in random_core])}. 
+ """ + signatures = {("a", 1)} + + muc, cc = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + if cc.minimal is None: + self.fail() + self._assert_muc(cc.muc_to_string(muc), [{f"a({i})" for i in random_core}]) + + def test_core_computer_shrink_satisfiable(self) -> None: + """ + Test the CoreComputer's `shrink` function with a satisfiable assumption set. + """ + + ctl = clingo.Control() + + program = """ + a(1..5). + """ + signatures = {("a", 1)} + + muc, _ = get_muc_of_program(program_string=program, assumption_signatures=signatures, control=ctl) + + self.assertEqual(muc, set()) + + def test_core_computer_get_multiple_minimal(self) -> None: + """ + Test the CoreComputer's `get_multiple_minimal` function to get multiple MUCs. + """ + + ctl = clingo.Control() + + program_path = TEST_DIR.joinpath("res/test_program_multi_muc.lp") + at = AssumptionTransformer(signatures={("a", 1)}) + parsed = at.parse_files([program_path]) + ctl.add("base", [], parsed) + ctl.ground([("base", [])]) + cc = CoreComputer(ctl, at.get_assumptions(ctl)) + + muc_generator = cc.get_multiple_minimal() + + muc_string_sets = [cc.muc_to_string(muc) for muc in list(muc_generator)] + for muc_string_set in muc_string_sets: + self.assertIn( + muc_string_set, + [{"a(1)", "a(2)"}, {"a(1)", "a(9)"}, {"a(3)", "a(5)", "a(8)"}], + ) + + def test_core_computer_get_multiple_minimal_max_mucs_2(self) -> None: + """ + Test the CoreComputer's `get_multiple_minimal` function to get multiple MUCs. + """ + + ctl = clingo.Control() + + program_path = TEST_DIR.joinpath("res/test_program_multi_muc.lp") + at = AssumptionTransformer(signatures={("a", 1)}) + parsed = at.parse_files([program_path]) + ctl.add("base", [], parsed) + ctl.ground([("base", [])]) + cc = CoreComputer(ctl, at.get_assumptions(ctl)) + + muc_generator = cc.get_multiple_minimal(max_mucs=2) + + muc_string_sets = [cc.muc_to_string(muc) for muc in list(muc_generator)] + for muc_string_set in muc_string_sets: + self.assertIn( + muc_string_set, + [{"a(1)", "a(2)"}, {"a(1)", "a(9)"}, {"a(3)", "a(5)", "a(8)"}], + ) + + self.assertEqual(len(muc_string_sets), 2) + + # INTERNAL + + def test_core_computer_internal_solve_no_assumptions(self) -> None: + """ + Test the CoreComputer's `_solve` function with no assumptions. + """ + + control = clingo.Control() + cc = CoreComputer(control, set()) + satisfiable, _, _ = cc._solve() # pylint: disable=W0212 + self.assertTrue(satisfiable) + + def test_core_computer_internal_compute_single_minimal_satisfiable(self) -> None: + """ + Test the CoreComputer's `_compute_single_minimal` function with a satisfiable assumption set. + """ + + control = clingo.Control() + program = "a.b.c." + control.add("base", [], program) + control.ground([("base", [])]) + assumptions = {(clingo.parse_term(c), True) for c in "abc"} + cc = CoreComputer(control, assumptions) + muc = cc._compute_single_minimal() # pylint: disable=W0212 + self.assertEqual(muc, set()) + + def test_core_computer_internal_compute_single_minimal_no_assumptions(self) -> None: + """ + Test the CoreComputer's `_compute_single_minimal` function with no assumptions. + """ + + control = clingo.Control() + cc = CoreComputer(control, set()) + self.assertRaises(ValueError, cc._compute_single_minimal) # pylint: disable=W0212 + + def test_core_computer_muc_to_string(self) -> None: + """ + Test the CoreComputer's `_compute_single_minimal` function with no assumptions. 
+ """ + + control = clingo.Control() + cc = CoreComputer(control, set()) + self.assertEqual( + cc.muc_to_string({(clingo.parse_term(string), True) for string in ["this", "is", "a", "test"]}), + {"this", "is", "a", "test"}, + ) # pylint: disable=W0212 diff --git a/tests/clingexplaid/test_transformers.py b/tests/clingexplaid/test_transformers.py new file mode 100644 index 0000000..9a20738 --- /dev/null +++ b/tests/clingexplaid/test_transformers.py @@ -0,0 +1,172 @@ +""" +Tests for the transformers package +""" + +from unittest import TestCase + +import clingo +from clingexplaid.transformers import ( + AssumptionTransformer, + RuleIDTransformer, + ConstraintTransformer, + RuleSplitter, + OptimizationRemover, + FactTransformer, +) +from clingexplaid.transformers.exceptions import UntransformedException, NotGroundedException + +from .test_main import TEST_DIR, read_file + + +class TestTransformers(TestCase): + """ + Test cases for transformers. + """ + + # ASSUMPTION TRANSFORMER + + def test_assumption_transformer_parse_file(self) -> None: + """ + Test the AssumptionTransformer's `parse_file` method. + """ + program_path = TEST_DIR.joinpath("res/test_program.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_assumptions_certain_signatures.lp") + at = AssumptionTransformer(signatures={(c, 1) for c in "abcdef"}) + result = at.parse_files([program_path]) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + + def test_assumption_transformer_parse_file_no_signatures(self) -> None: + """ + Test the AssumptionTransformer's `parse_file` method with no signatures provided. + """ + program_path = TEST_DIR.joinpath("res/test_program.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_assumptions_all.lp") + at = AssumptionTransformer() + result = at.parse_files([program_path]) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + + def test_assumption_transformer_get_assumptions_before_transformation(self) -> None: + """ + Test the AssumptionTransformer's behavior when get_assumptions is called before transformation. + """ + at = AssumptionTransformer() + control = clingo.Control() + self.assertRaises(UntransformedException, lambda: at.get_assumptions(control)) + + def test_assumption_transformer_get_assumptions_before_grounding(self) -> None: + """ + Test the AssumptionTransformer's behavior when get_assumptions is called before transformation. + """ + program_path = TEST_DIR.joinpath("res/test_program.lp") + at = AssumptionTransformer() + control = clingo.Control() + at.parse_files([program_path]) + self.assertRaises(NotGroundedException, lambda: at.get_assumptions(control)) + + def test_assumption_transformer_visit_definition(self) -> None: + """ + Test the AssumptionTransformer's detection of constant definitions. + """ + program_path = TEST_DIR.joinpath("res/test_program_constants.lp") + at = AssumptionTransformer() + control = clingo.Control() + result = at.parse_files([program_path]) + control.add("base", [], result) + control.ground([("base", [])]) + self.assertEqual( + at.program_constants, + {k: clingo.parse_term(v) for k, v in {"number": "42", "message": "helloworld"}.items()}, + ) + + # RULE ID TRANSFORMER + + def test_rule_id_transformer(self) -> None: + """ + Test the RuleIDTransformer's `parse_file` and `get_assumptions` methods. 
+ """ + program_path = TEST_DIR.joinpath("res/test_program.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_rule_ids.lp") + rt = RuleIDTransformer() + result = rt.parse_file(program_path) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + assumptions = { + (clingo.parse_term(s), True) + for s in [ + "_rule(1)", + "_rule(2)", + "_rule(3)", + "_rule(4)", + "_rule(5)", + "_rule(6)", + "_rule(7)", + ] + } + self.assertEqual(assumptions, rt.get_assumptions()) + + # CONSTRAINT TRANSFORMER + + def test_constraint_transformer(self) -> None: + """ + Test the ConstraintTransformer's `parse_file` method. + """ + program_path = TEST_DIR.joinpath("res/test_program_constraints.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_constraints.lp") + ct = ConstraintTransformer(constraint_head_symbol="unsat") + result = ct.parse_files([program_path]) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + + def test_constraint_transformer_include_id(self) -> None: + """ + Test the ConstraintTransformer's `parse_file` method. + """ + program_path = TEST_DIR.joinpath("res/test_program_constraints.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_constraints_id.lp") + ct = ConstraintTransformer(constraint_head_symbol="unsat", include_id=True) + with open(program_path, "r", encoding="utf-8") as f: + result = ct.parse_string(f.read()) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + + # RULE SPLITTER + + def test_rule_splitter(self) -> None: + """ + Test the RuleSplitter's `parse_file` method. + """ + + program_path = TEST_DIR.joinpath("res/test_program_rules.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_rules_split.lp") + rs = RuleSplitter() + result = rs.parse_file(program_path) + self.assertEqual(result.strip(), read_file(program_path_transformed).strip()) + + # OPTIMIZATION REMOVER + + def test_optimization_remover(self) -> None: + """ + Test the OptimizationRemover's `parse_file` and `parse_string_method` method. + """ + + program_path = TEST_DIR.joinpath("res/test_program_optimization.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_optimization.lp") + optrm = OptimizationRemover() + result_files = optrm.parse_files([program_path]) + with open(program_path, "r", encoding="utf-8") as f: + result_string = optrm.parse_string(f.read()) + self.assertEqual(result_files.strip(), read_file(program_path_transformed).strip()) + self.assertEqual(result_files.strip(), result_string.strip()) + + # FACT TRANSFORMER + + def test_fact_transformer(self) -> None: + """ + Test the FactTransformer's `parse_files` and `parse_string_method` method. 
+ """ + + program_path = TEST_DIR.joinpath("res/test_program.lp") + program_path_transformed = TEST_DIR.joinpath("res/transformed_program_facts.lp") + ft = FactTransformer(signatures={("a", 1), ("d", 1), ("e", 1)}) + result_files = ft.parse_files([program_path]) + with open(program_path, "r", encoding="utf-8") as f: + result_string = ft.parse_string(f.read()) + self.assertEqual(result_files.strip(), read_file(program_path_transformed).strip()) + self.assertEqual(result_files.strip(), result_string.strip()) diff --git a/tests/clingexplaid/test_unsat_constraints.py b/tests/clingexplaid/test_unsat_constraints.py new file mode 100644 index 0000000..1625924 --- /dev/null +++ b/tests/clingexplaid/test_unsat_constraints.py @@ -0,0 +1,74 @@ +""" +Tests for the unsat_constraints package +""" + +from typing import Dict, Optional +from unittest import TestCase + +from clingexplaid.unsat_constraints import UnsatConstraintComputer + +from .test_main import TEST_DIR + + +class TestUnsatConstraints(TestCase): + """ + Test cases for unsat constraints functionality. + """ + + # UNSAT CONSTRAINT COMPUTER + + def unsat_constraint_computer_helper( + self, + constraint_strings: Dict[int, str], + constraint_lines: Dict[int, int], + constraint_files: Dict[int, str], + assumption_string: Optional[str] = None, + ) -> None: + """ + Helper function for testing the UnsatConstraintComputer + """ + for method in ["from_files", "from_string"]: + program_path = TEST_DIR.joinpath("res/test_program_unsat_constraints.lp") + ucc = UnsatConstraintComputer() + if method == "from_files": + ucc.parse_files([str(program_path)]) + elif method == "from_string": + with open(program_path, "r", encoding="utf-8") as f: + ucc.parse_string(f.read()) + unsat_constraints = ucc.get_unsat_constraints(assumption_string=assumption_string) + self.assertEqual(set(unsat_constraints.values()), set(constraint_strings.values())) + + for c_id in unsat_constraints: + loc = ucc.get_constraint_location(c_id) + if method == "from_files": + # only check the source file if .from_files is used to initialize + self.assertEqual(loc.begin.filename, constraint_files[c_id]) # type: ignore + self.assertEqual(loc.begin.line, constraint_lines[c_id]) # type: ignore + + def test_unsat_constraint_computer(self) -> None: + """ + Testing the UnsatConstraintComputer without assumptions. + """ + self.unsat_constraint_computer_helper( + constraint_strings={2: ":- not a."}, + constraint_lines={2: 4}, + constraint_files={2: str(TEST_DIR.joinpath("res/test_program_unsat_constraints.lp"))}, + ) + + def test_unsat_constraint_computer_with_assumptions(self) -> None: + """ + Testing the UnsatConstraintComputer with assumptions. + """ + self.unsat_constraint_computer_helper( + constraint_strings={1: ":- a."}, + constraint_lines={1: 3}, + constraint_files={1: str(TEST_DIR.joinpath("res/test_program_unsat_constraints.lp"))}, + assumption_string="a", + ) + + def test_unsat_constraint_computer_not_initialized(self) -> None: + """ + Testing the UnsatConstraintComputer without initializing it. 
+ """ + ucc = UnsatConstraintComputer() + self.assertRaises(ValueError, ucc.get_unsat_constraints) diff --git a/tests/clingexplaid/test_utils.py b/tests/clingexplaid/test_utils.py new file mode 100644 index 0000000..7b3e30f --- /dev/null +++ b/tests/clingexplaid/test_utils.py @@ -0,0 +1,29 @@ +""" +Tests for the utils package +""" + +from unittest import TestCase + +from clingexplaid.utils import get_signatures_from_model_string, get_constants_from_arguments + + +class TestUtils(TestCase): + """ + Test cases for clingexplaid. + """ + + def test_get_signatures_from_model_string(self) -> None: + """ + Test getting signatures from a model string. + """ + model_string = "a(1,2) a(1,5) a(3,5) a(1,2,3) a(1,3,5), foo(bar), zero" + signatures = get_signatures_from_model_string(model_string) + self.assertEqual(signatures, {("a", 2), ("a", 3), ("foo", 1), ("zero", 0)}) + + def test_get_constants_from_arguments(self) -> None: + """ + Test getting constants from argument vector. + """ + self.assertEqual(get_constants_from_arguments(["-c", "a=42"]), {"a": "42"}) + self.assertEqual(get_constants_from_arguments(["test/dir/file.lp", "--const", "blob=value"]), {"blob": "value"}) + self.assertEqual(get_constants_from_arguments(["--const", "-a", "test/42"]), {}) diff --git a/tests/test_main.py b/tests/test_main.py index b3ae8ce..19730b2 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,11 +1,12 @@ """ Test cases for main application functionality. """ -import logging + from io import StringIO from unittest import TestCase -from clingexplaid.utils.logger import setup_logger +from clingexplaid.utils import logging +from clingexplaid.utils.logging import configure_logging, get_logger from clingexplaid.utils.parser import get_parser @@ -14,18 +15,17 @@ class TestMain(TestCase): Test cases for main application functionality. """ - def test_logger(self): + def test_logger(self) -> None: """ Test the logger. """ - log = setup_logger("global", logging.INFO) sio = StringIO() - for handler in log.handlers: - handler.setStream(sio) + configure_logging(sio, logging.INFO, True) + log = get_logger("main") log.info("test123") self.assertRegex(sio.getvalue(), "test123") - def test_parser(self): + def test_parser(self) -> None: """ Test the parser. """